From 3a2e40a84f571fcf3cfc7b142c91df19b95efc63 Mon Sep 17 00:00:00 2001 From: Duda Martins <109083465+mariammartins@users.noreply.github.com> Date: Thu, 13 Jun 2024 14:01:44 -0300 Subject: [PATCH] Refactor 5-app-infra-step (#48) * Initial commit * moving gcp-policies step to root README * refactoring the publish-artifacts and service-catalog steps to the root README * refactoring the artifact publish and service catalog repos steps * changing step 5-vps-sc to root README * refactoring assets * updata service catalog module name * add VPC-SC on root README * add VPC-SC bash commands * update VPC-SC bash commands * changing step 7-ml-post-deployment to 6-ml README * refactoring modules and source repos * fix 6-machine-learning step after app-infra refactoring * update 6-machine-learning step README * update README * update README * Update 5-app-infra/README.md Co-authored-by: Caetano Colin <164910343+caetano-colin@users.noreply.github.com> * update README * remove VPC-SC step * insertion of the VPC-SC step as a prerequisite for the example * merge of 6-machine-learning-steps and 6-mal-pipeline steps * Update 5-app-infra/README.md Co-authored-by: Caetano Colin <164910343+caetano-colin@users.noreply.github.com> * Update 5-app-infra/README.md Co-authored-by: Caetano Colin <164910343+caetano-colin@users.noreply.github.com> * Update 5-app-infra/README.md Co-authored-by: Caetano Colin <164910343+caetano-colin@users.noreply.github.com> * update 6-machine-learning README * update README * update README * update 6-machine-learning step * update title * update README with terraform locally steps * update README * fix merge * fix merge * fix merge * update README * update README * fix lint and example * rename reference * fix end of file * fix title Machine Learning Pipeline * fix whitespace --------- Co-authored-by: Caetano Colin <164910343+caetano-colin@users.noreply.github.com> --- .github/workflows/lint.yaml | 4 +- 5-app-infra/0-gcp-policies/README.md | 90 -- 
5-app-infra/1-artifact-publish/.gitignore | 58 - 5-app-infra/1-artifact-publish/README.md | 280 ----- .../business_unit_3/shared/README.md | 15 - .../business_unit_3/shared/backend.tf | 22 - .../business_unit_3/shared/locals.tf | 20 - .../business_unit_3/shared/outputs.tf | 34 - .../shared/publish_artifacts.tf | 37 - .../business_unit_3/shared/remote.tf | 30 - .../business_unit_3/shared/variables.tf | 25 - .../common.auto.example.tfvars | 19 - 5-app-infra/2-artifact-publish-repo/README.md | 86 -- .../images/tf2-cpu.2-13:0.1/Dockerfile | 17 - .../images/tf2-gpu.2-13:0.1/Dockerfile | 16 - 5-app-infra/3-service-catalog/.gitignore | 58 - 5-app-infra/3-service-catalog/README.md | 298 ----- .../business_unit_3/shared/README.md | 16 - .../business_unit_3/shared/backend.tf | 22 - .../business_unit_3/shared/locals.tf | 21 - .../business_unit_3/shared/outputs.tf | 25 - .../business_unit_3/shared/remote.tf | 39 - .../business_unit_3/shared/service_catalog.tf | 25 - .../business_unit_3/shared/variables.tf | 25 - .../common.auto.example.tfvars | 26 - 5-app-infra/4-service-catalog-repo/README.md | 164 --- .../4-service-catalog-repo/img/workflow.png | Bin 87776 -> 0 bytes .../modules/artifact_registry/README.md | 74 -- .../modules/artifact_registry/data.tf | 34 - .../modules/artifact_registry/locals.tf | 23 - .../modules/artifact_registry/main.tf | 64 -- .../terraform.tfvars.example | 43 - .../modules/artifact_registry/variables.tf | 86 -- .../modules/bigquery/README.md | 76 -- .../modules/bigquery/data.tf | 34 - .../modules/bigquery/locals.tf | 21 - .../modules/bigquery/main.tf | 24 - .../modules/bigquery/variables.tf | 66 -- .../modules/bucket/README.md | 149 --- .../modules/bucket/data.tf | 38 - .../modules/bucket/locals.tf | 20 - .../modules/bucket/main.tf | 148 --- .../modules/bucket/outputs.tf | 20 - .../modules/bucket/variables.tf | 186 --- .../modules/composer/README.md | 129 --- .../modules/composer/data.tf | 66 -- .../modules/composer/locals.tf | 74 -- 
.../modules/composer/main.tf | 93 -- .../modules/composer/outputs.tf | 40 - .../modules/composer/pipeline.tf | 121 -- .../modules/composer/terraform.tfvars.example | 29 - .../modules/composer/variables.tf | 138 --- .../modules/composer/vpc.tf | 288 ----- .../modules/metadata/README.md | 70 -- .../modules/metadata/data.tf | 34 - .../modules/metadata/locals.tf | 19 - .../modules/metadata/main.tf | 26 - .../modules/metadata/outputs.tf | 20 - .../modules/metadata/variables.tf | 37 - .../modules/notebook/README.md | 137 --- .../modules/notebook/data.tf | 65 -- .../modules/notebook/locals.tf | 20 - .../modules/notebook/main.tf | 95 -- .../modules/notebook/outputs.tf | 50 - .../modules/notebook/variables.tf | 150 --- .../modules/pubsub/README.md | 82 -- .../modules/pubsub/data.tf | 35 - .../modules/pubsub/locals.tf | 20 - .../modules/pubsub/main.tf | 39 - .../modules/pubsub/outputs.tf | 20 - .../modules/pubsub/variables.tf | 52 - .../modules/secrets/README.md | 79 -- .../modules/secrets/data.tf | 39 - .../modules/secrets/locals.tf | 20 - .../modules/secrets/main.tf | 56 - .../modules/secrets/outputs.tf | 20 - .../modules/secrets/variables.tf | 36 - .../modules/tensorboard/README.md | 69 -- .../modules/tensorboard/data.tf | 36 - .../modules/tensorboard/locals.tf | 19 - .../modules/tensorboard/main.tf | 26 - .../modules/tensorboard/outputs.tf | 20 - .../modules/tensorboard/variables.tf | 37 - 5-app-infra/5-vpc-sc/README.md | 175 --- 5-app-infra/6-machine-learning/README.md | 407 ------- .../non-production/common.auto.tfvars | 1 - .../non-production/versions.tf | 43 - .../production/common.auto.tfvars | 1 - .../business_unit_3/production/versions.tf | 43 - .../README.md | 252 ----- 5-app-infra/README.md | 721 +++++++++++- .../modules/publish_artifacts/data.tf | 0 .../modules/publish_artifacts/locals.tf | 0 .../modules/publish_artifacts/main.tf | 0 .../modules/publish_artifacts/outputs.tf | 0 .../modules/publish_artifacts/variables.tf | 0 
.../modules/publish_artifacts/versions.tf | 0 .../service_catalog}/data.tf | 0 .../service_catalog}/locals.tf | 0 .../service_catalog}/main.tf | 0 .../service_catalog}/outputs.tf | 0 .../service_catalog}/variables.tf | 0 .../projects/artifact-publish/README.md | 344 ------ .../shared/publish_artifacts.tf | 2 +- .../modules/publish_artifacts/data.tf | 35 - .../modules/publish_artifacts/locals.tf | 33 - .../modules/publish_artifacts/main.tf | 162 --- .../modules/publish_artifacts/outputs.tf | 20 - .../modules/publish_artifacts/variables.tf | 71 -- .../modules/publish_artifacts/versions.tf | 46 - .../projects/machine-learning/.gitignore | 16 - .../projects/machine-learning/README.md | 444 -------- .../business_unit_3/development/README.md | 15 - .../business_unit_3/development/backend.tf | 22 - .../development/common.auto.tfvars | 1 - .../business_unit_3/development/locals.tf | 23 - .../business_unit_3/development/main.tf | 26 - .../business_unit_3/development/outputs.tf | 16 - .../business_unit_3/development/remote.tf | 40 - .../business_unit_3/development/variables.tf | 35 - .../business_unit_3/development/versions.tf | 43 - .../business_unit_3/non-production/README.md | 20 - .../business_unit_3/non-production/backend.tf | 22 - .../non-production/common.auto.tfvars | 1 - .../business_unit_3/non-production/locals.tf | 23 - .../business_unit_3/non-production/main.tf | 56 - .../business_unit_3/non-production/outputs.tf | 49 - .../business_unit_3/non-production/remote.tf | 40 - .../non-production/variables.tf | 35 - .../non-production/versions.tf | 43 - .../business_unit_3/production/README.md | 20 - .../business_unit_3/production/backend.tf | 22 - .../production/common.auto.tfvars | 1 - .../business_unit_3/production/locals.tf | 23 - .../business_unit_3/production/main.tf | 57 - .../business_unit_3/production/outputs.tf | 49 - .../business_unit_3/production/remote.tf | 40 - .../business_unit_3/production/variables.tf | 35 - .../business_unit_3/production/versions.tf | 
43 - .../common.auto.example.tfvars | 23 - .../machine-learning/modules/base_env/data.tf | 37 - .../machine-learning/modules/base_env/iam.tf | 169 --- .../machine-learning/modules/base_env/main.tf | 121 -- .../modules/base_env/outputs.tf | 50 - .../modules/base_env/roles.tf | 293 ----- .../modules/base_env/variables.tf | 380 ------- .../projects/service-catalog/README.md | 361 ------ .../business_unit_3/shared/common.auto.tfvars | 1 - .../business_unit_3/shared/service_catalog.tf | 2 +- .../service-catalog/modules/svc_ctlg/data.tf | 39 - .../modules/svc_ctlg/locals.tf | 41 - .../service-catalog/modules/svc_ctlg/main.tf | 143 --- .../modules/svc_ctlg/outputs.tf | 25 - .../modules/svc_ctlg/variables.tf | 44 - .../images/tf2-cpu.2-8:01/Dockerfile | 0 .../images/vertexpipeline:v2/Dockerfile | 0 6-ml-pipeline/dev/Readme.md | 164 --- Makefile | 2 +- README.md | 2 +- docs/assets/terraform/1-org/README.md | 11 + .../assets/terraform/2-environments/README.md | 11 + .../machine-learning-pipeline}/.gitignore | 0 examples/machine-learning-pipeline/README.md | 1002 +++++++++++++++++ .../business_unit_3/development/README.md | 0 .../business_unit_3/development/backend.tf | 0 .../development}/common.auto.tfvars | 0 .../business_unit_3/development/locals.tf | 0 .../business_unit_3/development/main.tf | 0 .../business_unit_3/development/outputs.tf | 0 .../business_unit_3/development/remote.tf | 0 .../business_unit_3/development/variables.tf | 0 .../business_unit_3/development}/versions.tf | 0 .../business_unit_3/non-production/README.md | 0 .../business_unit_3/non-production/backend.tf | 0 .../non-production}/common.auto.tfvars | 0 .../business_unit_3/non-production/locals.tf | 0 .../business_unit_3/non-production/main.tf | 0 .../business_unit_3/non-production/outputs.tf | 0 .../business_unit_3/non-production/remote.tf | 0 .../non-production/variables.tf | 0 .../non-production}/versions.tf | 0 .../business_unit_3/production/README.md | 0 .../business_unit_3/production/backend.tf | 
0 .../production}/common.auto.tfvars | 0 .../business_unit_3/production/locals.tf | 0 .../business_unit_3/production/main.tf | 0 .../business_unit_3/production/outputs.tf | 0 .../business_unit_3/production/remote.tf | 0 .../business_unit_3/production/variables.tf | 0 .../business_unit_3/production}/versions.tf | 0 .../common.auto.example.tfvars | 0 .../modules/base_env/data.tf | 0 .../modules/base_env/iam.tf | 0 .../modules/base_env/main.tf | 0 .../modules/base_env/outputs.tf | 0 .../modules/base_env/roles.tf | 0 .../modules/base_env/variables.tf | 0 197 files changed, 1691 insertions(+), 9830 deletions(-) delete mode 100644 5-app-infra/0-gcp-policies/README.md delete mode 100644 5-app-infra/1-artifact-publish/.gitignore delete mode 100644 5-app-infra/1-artifact-publish/README.md delete mode 100644 5-app-infra/1-artifact-publish/business_unit_3/shared/README.md delete mode 100644 5-app-infra/1-artifact-publish/business_unit_3/shared/backend.tf delete mode 100644 5-app-infra/1-artifact-publish/business_unit_3/shared/locals.tf delete mode 100644 5-app-infra/1-artifact-publish/business_unit_3/shared/outputs.tf delete mode 100644 5-app-infra/1-artifact-publish/business_unit_3/shared/publish_artifacts.tf delete mode 100644 5-app-infra/1-artifact-publish/business_unit_3/shared/remote.tf delete mode 100644 5-app-infra/1-artifact-publish/business_unit_3/shared/variables.tf delete mode 100644 5-app-infra/1-artifact-publish/common.auto.example.tfvars delete mode 100644 5-app-infra/2-artifact-publish-repo/README.md delete mode 100644 5-app-infra/2-artifact-publish-repo/images/tf2-cpu.2-13:0.1/Dockerfile delete mode 100644 5-app-infra/2-artifact-publish-repo/images/tf2-gpu.2-13:0.1/Dockerfile delete mode 100644 5-app-infra/3-service-catalog/.gitignore delete mode 100644 5-app-infra/3-service-catalog/README.md delete mode 100644 5-app-infra/3-service-catalog/business_unit_3/shared/README.md delete mode 100644 5-app-infra/3-service-catalog/business_unit_3/shared/backend.tf 
delete mode 100644 5-app-infra/3-service-catalog/business_unit_3/shared/locals.tf delete mode 100644 5-app-infra/3-service-catalog/business_unit_3/shared/outputs.tf delete mode 100644 5-app-infra/3-service-catalog/business_unit_3/shared/remote.tf delete mode 100644 5-app-infra/3-service-catalog/business_unit_3/shared/service_catalog.tf delete mode 100644 5-app-infra/3-service-catalog/business_unit_3/shared/variables.tf delete mode 100644 5-app-infra/3-service-catalog/common.auto.example.tfvars delete mode 100644 5-app-infra/4-service-catalog-repo/README.md delete mode 100644 5-app-infra/4-service-catalog-repo/img/workflow.png delete mode 100644 5-app-infra/4-service-catalog-repo/modules/artifact_registry/README.md delete mode 100644 5-app-infra/4-service-catalog-repo/modules/artifact_registry/data.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/artifact_registry/locals.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/artifact_registry/main.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/artifact_registry/terraform.tfvars.example delete mode 100644 5-app-infra/4-service-catalog-repo/modules/artifact_registry/variables.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/bigquery/README.md delete mode 100644 5-app-infra/4-service-catalog-repo/modules/bigquery/data.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/bigquery/locals.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/bigquery/main.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/bigquery/variables.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/bucket/README.md delete mode 100644 5-app-infra/4-service-catalog-repo/modules/bucket/data.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/bucket/locals.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/bucket/main.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/bucket/outputs.tf delete mode 
100644 5-app-infra/4-service-catalog-repo/modules/bucket/variables.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/composer/README.md delete mode 100644 5-app-infra/4-service-catalog-repo/modules/composer/data.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/composer/locals.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/composer/main.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/composer/outputs.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/composer/pipeline.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/composer/terraform.tfvars.example delete mode 100644 5-app-infra/4-service-catalog-repo/modules/composer/variables.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/composer/vpc.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/metadata/README.md delete mode 100644 5-app-infra/4-service-catalog-repo/modules/metadata/data.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/metadata/locals.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/metadata/main.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/metadata/outputs.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/metadata/variables.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/notebook/README.md delete mode 100644 5-app-infra/4-service-catalog-repo/modules/notebook/data.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/notebook/locals.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/notebook/main.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/notebook/outputs.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/notebook/variables.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/pubsub/README.md delete mode 100644 5-app-infra/4-service-catalog-repo/modules/pubsub/data.tf delete mode 100644 
5-app-infra/4-service-catalog-repo/modules/pubsub/locals.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/pubsub/main.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/pubsub/outputs.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/pubsub/variables.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/secrets/README.md delete mode 100644 5-app-infra/4-service-catalog-repo/modules/secrets/data.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/secrets/locals.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/secrets/main.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/secrets/outputs.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/secrets/variables.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/tensorboard/README.md delete mode 100644 5-app-infra/4-service-catalog-repo/modules/tensorboard/data.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/tensorboard/locals.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/tensorboard/main.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/tensorboard/outputs.tf delete mode 100644 5-app-infra/4-service-catalog-repo/modules/tensorboard/variables.tf delete mode 100644 5-app-infra/5-vpc-sc/README.md delete mode 100644 5-app-infra/6-machine-learning/README.md delete mode 120000 5-app-infra/6-machine-learning/business_unit_3/non-production/common.auto.tfvars delete mode 100644 5-app-infra/6-machine-learning/business_unit_3/non-production/versions.tf delete mode 120000 5-app-infra/6-machine-learning/business_unit_3/production/common.auto.tfvars delete mode 100644 5-app-infra/6-machine-learning/business_unit_3/production/versions.tf delete mode 100644 5-app-infra/7-machine-learning-post-deployment/README.md rename 5-app-infra/{1-artifact-publish => }/modules/publish_artifacts/data.tf (100%) rename 5-app-infra/{1-artifact-publish => 
}/modules/publish_artifacts/locals.tf (100%) rename 5-app-infra/{1-artifact-publish => }/modules/publish_artifacts/main.tf (100%) rename 5-app-infra/{1-artifact-publish => }/modules/publish_artifacts/outputs.tf (100%) rename 5-app-infra/{1-artifact-publish => }/modules/publish_artifacts/variables.tf (100%) rename 5-app-infra/{1-artifact-publish => }/modules/publish_artifacts/versions.tf (100%) rename 5-app-infra/{3-service-catalog/modules/svc_ctlg => modules/service_catalog}/data.tf (100%) rename 5-app-infra/{3-service-catalog/modules/svc_ctlg => modules/service_catalog}/locals.tf (100%) rename 5-app-infra/{3-service-catalog/modules/svc_ctlg => modules/service_catalog}/main.tf (100%) rename 5-app-infra/{3-service-catalog/modules/svc_ctlg => modules/service_catalog}/outputs.tf (100%) rename 5-app-infra/{3-service-catalog/modules/svc_ctlg => modules/service_catalog}/variables.tf (100%) delete mode 100644 5-app-infra/projects/artifact-publish/README.md delete mode 100644 5-app-infra/projects/artifact-publish/modules/publish_artifacts/data.tf delete mode 100644 5-app-infra/projects/artifact-publish/modules/publish_artifacts/locals.tf delete mode 100644 5-app-infra/projects/artifact-publish/modules/publish_artifacts/main.tf delete mode 100644 5-app-infra/projects/artifact-publish/modules/publish_artifacts/outputs.tf delete mode 100644 5-app-infra/projects/artifact-publish/modules/publish_artifacts/variables.tf delete mode 100644 5-app-infra/projects/artifact-publish/modules/publish_artifacts/versions.tf delete mode 100644 5-app-infra/projects/machine-learning/.gitignore delete mode 100644 5-app-infra/projects/machine-learning/README.md delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/development/README.md delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/development/backend.tf delete mode 120000 5-app-infra/projects/machine-learning/business_unit_3/development/common.auto.tfvars delete mode 100644 
5-app-infra/projects/machine-learning/business_unit_3/development/locals.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/development/main.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/development/outputs.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/development/remote.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/development/variables.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/development/versions.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/non-production/README.md delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/non-production/backend.tf delete mode 120000 5-app-infra/projects/machine-learning/business_unit_3/non-production/common.auto.tfvars delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/non-production/locals.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/non-production/main.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/non-production/outputs.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/non-production/remote.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/non-production/variables.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/non-production/versions.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/production/README.md delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/production/backend.tf delete mode 120000 5-app-infra/projects/machine-learning/business_unit_3/production/common.auto.tfvars delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/production/locals.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/production/main.tf delete mode 100644 
5-app-infra/projects/machine-learning/business_unit_3/production/outputs.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/production/remote.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/production/variables.tf delete mode 100644 5-app-infra/projects/machine-learning/business_unit_3/production/versions.tf delete mode 100644 5-app-infra/projects/machine-learning/common.auto.example.tfvars delete mode 100644 5-app-infra/projects/machine-learning/modules/base_env/data.tf delete mode 100644 5-app-infra/projects/machine-learning/modules/base_env/iam.tf delete mode 100644 5-app-infra/projects/machine-learning/modules/base_env/main.tf delete mode 100644 5-app-infra/projects/machine-learning/modules/base_env/outputs.tf delete mode 100644 5-app-infra/projects/machine-learning/modules/base_env/roles.tf delete mode 100644 5-app-infra/projects/machine-learning/modules/base_env/variables.tf delete mode 120000 5-app-infra/projects/service-catalog/business_unit_3/shared/common.auto.tfvars delete mode 100644 5-app-infra/projects/service-catalog/modules/svc_ctlg/data.tf delete mode 100644 5-app-infra/projects/service-catalog/modules/svc_ctlg/locals.tf delete mode 100644 5-app-infra/projects/service-catalog/modules/svc_ctlg/main.tf delete mode 100644 5-app-infra/projects/service-catalog/modules/svc_ctlg/outputs.tf delete mode 100644 5-app-infra/projects/service-catalog/modules/svc_ctlg/variables.tf rename 5-app-infra/{2-artifact-publish-repo => source_repos/artifact-publish}/images/tf2-cpu.2-8:01/Dockerfile (100%) rename 5-app-infra/{2-artifact-publish-repo => source_repos/artifact-publish}/images/vertexpipeline:v2/Dockerfile (100%) delete mode 100644 6-ml-pipeline/dev/Readme.md rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/.gitignore (100%) create mode 100644 examples/machine-learning-pipeline/README.md rename {5-app-infra/6-machine-learning => 
examples/machine-learning-pipeline}/business_unit_3/development/README.md (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/development/backend.tf (100%) rename {5-app-infra/1-artifact-publish/business_unit_3/shared => examples/machine-learning-pipeline/business_unit_3/development}/common.auto.tfvars (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/development/locals.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/development/main.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/development/outputs.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/development/remote.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/development/variables.tf (100%) rename {5-app-infra/1-artifact-publish/business_unit_3/shared => examples/machine-learning-pipeline/business_unit_3/development}/versions.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/non-production/README.md (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/non-production/backend.tf (100%) rename {5-app-infra/3-service-catalog/business_unit_3/shared => examples/machine-learning-pipeline/business_unit_3/non-production}/common.auto.tfvars (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/non-production/locals.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/non-production/main.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/non-production/outputs.tf (100%) rename {5-app-infra/6-machine-learning => 
examples/machine-learning-pipeline}/business_unit_3/non-production/remote.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/non-production/variables.tf (100%) rename {5-app-infra/3-service-catalog/business_unit_3/shared => examples/machine-learning-pipeline/business_unit_3/non-production}/versions.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/production/README.md (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/production/backend.tf (100%) rename {5-app-infra/6-machine-learning/business_unit_3/development => examples/machine-learning-pipeline/business_unit_3/production}/common.auto.tfvars (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/production/locals.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/production/main.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/production/outputs.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/production/remote.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/business_unit_3/production/variables.tf (100%) rename {5-app-infra/6-machine-learning/business_unit_3/development => examples/machine-learning-pipeline/business_unit_3/production}/versions.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/common.auto.example.tfvars (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/modules/base_env/data.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/modules/base_env/iam.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/modules/base_env/main.tf (100%) rename 
{5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/modules/base_env/outputs.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/modules/base_env/roles.tf (100%) rename {5-app-infra/6-machine-learning => examples/machine-learning-pipeline}/modules/base_env/variables.tf (100%) diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml index 0b89cae9..2d576017 100644 --- a/.github/workflows/lint.yaml +++ b/.github/workflows/lint.yaml @@ -48,10 +48,10 @@ jobs: env: DISABLE_TFLINT: 1 ENABLE_PARALLEL: 0 - EXCLUDE_LINT_DIRS: \./5-app-infra/6-machine-learning|\./5-app-infra/projects/machine-learning|\./docs/assets/terraform + EXCLUDE_LINT_DIRS: \./examples/machine-learning-pipeline|\./docs/assets/terraform - run: docker run --rm -e DISABLE_TFLINT -e ENABLE_PARALLEL -e EXCLUDE_LINT_DIRS -v ${{ github.workspace }}:/workspace ${{ steps.variables.outputs.dev-tools }} /usr/local/bin/test_lint.sh env: DISABLE_TFLINT: 1 ENABLE_PARALLEL: 0 - EXCLUDE_LINT_DIRS: \./5-app-infra/6-machine-learning|\./5-app-infra/projects/machine-learning|\./docs/assets/terraform + EXCLUDE_LINT_DIRS: \./examples/machine-learning-pipeline|\./docs/assets/terraform diff --git a/5-app-infra/0-gcp-policies/README.md b/5-app-infra/0-gcp-policies/README.md deleted file mode 100644 index 8e75a01e..00000000 --- a/5-app-infra/0-gcp-policies/README.md +++ /dev/null @@ -1,90 +0,0 @@ -# 5-app-infra - -This repo is part of a multi-part guide that shows how to configure and deploy -the example.com reference architecture described in -[Google Cloud security foundations guide](https://cloud.google.com/architecture/security-foundations). The following table lists the parts of the guide. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
0-bootstrapBootstraps a Google Cloud organization, creating all the required resources -and permissions to start using the Cloud Foundation Toolkit (CFT). This -step also configures a CI/CD Pipeline for foundations code in subsequent -stages.
1-orgSets up top-level shared folders, monitoring and networking projects, -organization-level logging, and baseline security settings through -organizational policies.
2-environmentsSets up development, non-production, and production environments within the -Google Cloud organization that you've created.
3-networks-dual-svpcSets up base and restricted shared VPCs with default DNS, NAT (optional), -Private Service networking, VPC service controls, on-premises Dedicated -Interconnect, and baseline firewall rules for each environment. It also sets -up the global DNS hub.
3-networks-hub-and-spokeSets up base and restricted shared VPCs with all the default configuration -found on step 3-networks-dual-svpc, but here the architecture will be based on the -Hub and Spoke network model. It also sets up the global DNS hub
4-projectsSets up a folder structure, projects, and an application infrastructure pipeline for applications, - which are connected as service projects to the shared VPC created in the previous stage.
5-app-infra - 01-gcp-polcies(this file)Configures GCP Policies repository for Cloud Build
- -For an overview of the architecture and the parts, see the -[terraform-google-enterprise-genai README](https://github.com/terraform-google-modules/terraform-google-enterprise-genai) -file. - -## Purpose - -When using Cloud Build for deployment, Policy Verification must be checked. This procedure will configure a gcp-policy repository that will be used in subsequent deployments in `5-app-infra` - -### Deploying with Cloud Build - -1. Ensure you are in a neutral directory outside any other git related repositories. - -1. Clone the `gcp-policies` repo based on the Terraform output from the `0-bootstrap` step. -Clone the repo at the same level of the `terraform-google-enterprise-genai` folder, the following instructions assume this layout. -Run `terraform output cloudbuild_project_id` in the `0-bootstrap` folder to get the Cloud Build Project ID. - - ```bash - export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="gcp-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${INFRA_PIPELINE_PROJECT_ID} - - gcloud source repos clone gcp-policies gcp-policies-app-infra --project=${INFRA_PIPELINE_PROJECT_ID} - ``` - - **Note:** `gcp-policies` repo has the same name as the repo created in step `1-org`. In order to prevent a collision, the previous command will clone this repo in the folder `gcp-policies-app-infra`. - -1. Commit changes and push your main branch to the new repo. - - ```bash - git add . - git commit -m 'Initialize policy library repo' - - git push --set-upstream origin main - ``` - -1. Navigate out of the repo. - - ```bash - cd .. 
- ``` diff --git a/5-app-infra/1-artifact-publish/.gitignore b/5-app-infra/1-artifact-publish/.gitignore deleted file mode 100644 index fe232a04..00000000 --- a/5-app-infra/1-artifact-publish/.gitignore +++ /dev/null @@ -1,58 +0,0 @@ -# OSX leaves these everywhere on SMB shares -._* - -# OSX trash -.DS_Store - -# Python -*.pyc - -# Emacs save files -*~ -\#*\# -.\#* - -# Vim-related files -[._]*.s[a-w][a-z] -[._]s[a-w][a-z] -*.un~ -Session.vim -.netrwhist - -### https://raw.github.com/github/gitignore/90f149de451a5433aebd94d02d11b0e28843a1af/Terraform.gitignore - -# Local .terraform directories -**/.terraform/* - -# .tfstate files -*.tfstate -*.tfstate.* - -# Crash log files -crash.log - -# Ignore any .tfvars files that are generated automatically for each Terraform run. Most -# .tfvars files are managed as part of configuration and so should be included in -# version control. -# -# example.tfvars - -# Ignore override files as they are usually used to override resources locally and so -# are not checked in -override.tf -override.tf.json -*_override.tf -*_override.tf.json -.idea/ -.vscode/ -# Kitchen files -**/inspec.lock -**.gem -**/.kitchen -**/.kitchen.local.yml -**/Gemfile.lock - -credentials.json - -# File to populate env vars used by Docker test runs -.envrc diff --git a/5-app-infra/1-artifact-publish/README.md b/5-app-infra/1-artifact-publish/README.md deleted file mode 100644 index 51f0be86..00000000 --- a/5-app-infra/1-artifact-publish/README.md +++ /dev/null @@ -1,280 +0,0 @@ -# 5-app-infra - -This repo is part of a multi-part guide that shows how to configure and deploy -the example.com reference architecture described in -[Google Cloud security foundations guide](https://cloud.google.com/architecture/security-foundations). The following table lists the parts of the guide. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
0-bootstrapBootstraps a Google Cloud organization, creating all the required resources -and permissions to start using the Cloud Foundation Toolkit (CFT). This -step also configures a CI/CD Pipeline for foundations code in subsequent -stages.
1-orgSets up top-level shared folders, monitoring and networking projects, -organization-level logging, and baseline security settings through -organizational policies.
2-environmentsSets up development, non-production, and production environments within the -Google Cloud organization that you've created.
3-networks-dual-svpcSets up base and restricted shared VPCs with default DNS, NAT (optional), -Private Service networking, VPC service controls, on-premises Dedicated -Interconnect, and baseline firewall rules for each environment. It also sets -up the global DNS hub.
3-networks-hub-and-spokeSets up base and restricted shared VPCs with all the default configuration -found on step 3-networks-dual-svpc, but here the architecture will be based on the -Hub and Spoke network model. It also sets up the global DNS hub
4-projectsSets up a folder structure, projects, and an application infrastructure pipeline for applications, - which are connected as service projects to the shared VPC created in the previous stage.
5-app-infra 01-artifact-publish(this file)Deploys Artifact Repository
- -For an overview of the architecture and the parts, see the -[terraform-google-enterprise-genai README](https://github.com/terraform-google-modules/terraform-google-enterprise-genai) -file. - -## Purpose - -The purpose of this step is to deploy out an artifact registry to store custom docker images. A Cloud Build pipeline is also deployed out. At the time of this writing, it is configured to attach itself to a Cloud Source Repository. The Cloud Build pipeline is responsible for building out a custom image that may be used in Machine Learning Workflows. If you are in a situation where company policy requires no outside repositories to be accessed, custom images can be used to keep access to any image internally. - -Since every workflow will have access to these images, it is deployed in the `common` folder, and keeping with the foundations structure, is listed as `shared` under this Business Unit. It will only need to be deployed once. - -The Pipeline is connected to a GitHub repsository with a simple structure: - -``` -├── README.md -└── images - ├── tf2-cpu.2-13:0.1 - │   └── Dockerfile - └── tf2-gpu.2-13:0.1 - └── Dockerfile -``` -for the purposes of this example, the pipeline is configured to monitor the `main` branch of this repository. - -each folder under `images` has the full name and tag of the image that must be built. Once a change to the `main` branch is pushed, the pipeline will analyse which files have changed and build that image out and place it in the artifact repository. For example, if there is a change to the Dockerfile in the `tf2-cpu-13:0.1` folder, or if the folder itself has been renamed, it will build out an image and tag it based on the folder name that the Dockerfile has been housed in. 
- -Once pushed, the pipeline can be accessed by navigating to the project name created in step-4: - -```bash -terraform -chdir="../terraform-google-enterprise-genai/4-projects/business_unit_3/shared/" output -raw common_artifacts_project_id -``` - -## Prerequisites - -1. 0-bootstrap executed successfully. -1. 1-org executed successfully. -1. 2-environments executed successfully. -1. 3-networks executed successfully. -1. 4-projects executed successfully. - -### Troubleshooting - -Please refer to [troubleshooting](../docs/TROUBLESHOOTING.md) if you run into issues during this step. - -## Usage - -**Note:** If you are using MacOS, replace `cp -RT` with `cp -R` in the relevant -commands. The `-T` flag is needed for Linux, but causes problems for MacOS. - -### Deploying with Cloud Build - -1. Clone the `bu3-artifact-publish` repo. - - ```bash - gcloud source repos clone bu3-artifact-publish --project=${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. Navigate into the repo, change to non-main branch and copy contents of foundation to new repo. - All subsequent steps assume you are running them from the bu3-artifact-publisg directory. - If you run them from another directory, adjust your copy paths accordingly. - - ```bash - cd bu3-artifact-publish - git checkout -b plan - - cp -RT ../terraform-google-enterprise-genai/5-app-infra/1-artifact-publish/ . - cp ../terraform-google-enterprise-genai/build/cloudbuild-tf-* . - cp ../terraform-google-enterprise-genai/build/tf-wrapper.sh . - chmod 755 ./tf-wrapper.sh - ``` - -1. Rename `common.auto.example.tfvars` to `common.auto.tfvars`. - - ```bash - mv common.auto.example.tfvars common.auto.tfvars - ``` - -1. Update the file with values from your environment and 0-bootstrap. See any of the business unit 1 envs folders [README.md](./business_unit_1/production/README.md) files for additional information on the values in the `common.auto.tfvars` file. 
- - ```bash - export remote_state_bucket=$(terraform -chdir="../terraform-google-enterprise-genai/0-bootstrap/" output -raw projects_gcs_bucket_tfstate) - echo "remote_state_bucket = ${remote_state_bucket}" - sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. - - ```bash - export backend_bucket=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-artifact-publish"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` - -1. Commit changes. - - ```bash - git add . - git commit -m 'Initialize repo' - ``` - -1. Push your plan branch to trigger a plan for all environments. Because the - _plan_ branch is not a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), pushing your _plan_ - branch triggers _terraform plan_ but not _terraform apply_. Review the plan output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ```bash - git push --set-upstream origin plan - ``` - -1. Merge changes to shared. Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), - pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ```bash - git checkout -b shared - git push origin shared - ``` - -1. `cd` out of the `bu3-artifacts-publish` repository. - - -### Run Terraform locally - -1. The next instructions assume that you are at the same level of the `terraform-google-enterprise-genai` folder. Change into `5-app-infra` folder, copy the Terraform wrapper script and ensure it can be executed. 
- - ```bash - cd terraform-google-enterprise-genai/5-app-infra/projects/artifact-publish - cp ../../../build/tf-wrapper.sh . - chmod 755 ./tf-wrapper.sh - ``` - -1. Rename `common.auto.example.tfvars` files to `common.auto.tfvars`. - - ```bash - mv common.auto.example.tfvars common.auto.tfvars - ``` - -1. Update `common.auto.tfvars` file with values from your environment. -1. Use `terraform output` to get the project backend bucket value from 0-bootstrap. - - ```bash - export remote_state_bucket=$(terraform -chdir="../0-bootstrap/" output -raw projects_gcs_bucket_tfstate) - echo "remote_state_bucket = ${remote_state_bucket}" - sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. - - ```bash - export backend_bucket=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-artifact-publish"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. - - ```bash - export backend_bucket=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-artifact-publish"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` - -1. Provide the user that will be running `./tf-wrapper.sh` the Service Account Token Creator role to the bu3 Terraform service account. -1. Provide the user permissions to run the terraform locally with the `serviceAccountTokenCreator` permission. 
- - ```bash - member="user:$(gcloud auth list --filter="status=ACTIVE" --format="value(account)")" - echo ${member} - - project_id=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${project_id} - - terraform_sa=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-artifact-publish"' --raw-output) - echo ${terraform_sa} - - gcloud iam service-accounts add-iam-policy-binding ${terraform_sa} --project ${project_id} --member="${member}" --role="roles/iam.serviceAccountTokenCreator" - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. - - ```bash - export backend_bucket=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-artifact-publish"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` - -We will now deploy each of our environments (development/production/non-production) using this script. -When using Cloud Build or Jenkins as your CI/CD tool, each environment corresponds to a branch in the repository for the `5-app-infra` step. Only the corresponding environment is applied. - -To use the `validate` option of the `tf-wrapper.sh` script, please follow the [instructions](https://cloud.google.com/docs/terraform/policy-validation/validate-policies#install) to install the terraform-tools component. - -1. Use `terraform output` to get the Infra Pipeline Project ID from 4-projects output. 
- - ```bash - export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${INFRA_PIPELINE_PROJECT_ID} - - export GOOGLE_IMPERSONATE_SERVICE_ACCOUNT=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-artifact-publish"' --raw-output) - echo ${GOOGLE_IMPERSONATE_SERVICE_ACCOUNT} - ``` - -1. Run `init` and `plan` and review output for environment shared (common). - - ```bash - ./tf-wrapper.sh init shared - ./tf-wrapper.sh plan shared - ``` - -1. Run `validate` and check for violations. - - ```bash - ./tf-wrapper.sh validate shared $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. Run `apply` shared. - - ```bash - ./tf-wrapper.sh apply shared - ``` - - -If you received any errors or made any changes to the Terraform config or `common.auto.tfvars` you must re-run `./tf-wrapper.sh plan ` before running `./tf-wrapper.sh apply `. - -After executing this stage, unset the `GOOGLE_IMPERSONATE_SERVICE_ACCOUNT` environment variable. - -```bash -unset GOOGLE_IMPERSONATE_SERVICE_ACCOUNT -``` - diff --git a/5-app-infra/1-artifact-publish/business_unit_3/shared/README.md b/5-app-infra/1-artifact-publish/business_unit_3/shared/README.md deleted file mode 100644 index c255b2ef..00000000 --- a/5-app-infra/1-artifact-publish/business_unit_3/shared/README.md +++ /dev/null @@ -1,15 +0,0 @@ - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| instance\_region | The region where compute instance will be created. A subnetwork must exists in the instance region. | `string` | n/a | yes | -| remote\_state\_bucket | Backend bucket to load remote state information from previous steps. 
| `string` | n/a | yes | - -## Outputs - -| Name | Description | -|------|-------------| -| cloudbuild\_trigger\_id | n/a | - - diff --git a/5-app-infra/1-artifact-publish/business_unit_3/shared/backend.tf b/5-app-infra/1-artifact-publish/business_unit_3/shared/backend.tf deleted file mode 100644 index 023a3c07..00000000 --- a/5-app-infra/1-artifact-publish/business_unit_3/shared/backend.tf +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -terraform { - backend "gcs" { - bucket = "UPDATE_APP_INFRA_BUCKET" - prefix = "terraform/app-infra/business_unit_3/shared" - } -} diff --git a/5-app-infra/1-artifact-publish/business_unit_3/shared/locals.tf b/5-app-infra/1-artifact-publish/business_unit_3/shared/locals.tf deleted file mode 100644 index d708d36e..00000000 --- a/5-app-infra/1-artifact-publish/business_unit_3/shared/locals.tf +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -locals { - business_unit = "business_unit_3" - environment = "common" -} diff --git a/5-app-infra/1-artifact-publish/business_unit_3/shared/outputs.tf b/5-app-infra/1-artifact-publish/business_unit_3/shared/outputs.tf deleted file mode 100644 index 11e2aade..00000000 --- a/5-app-infra/1-artifact-publish/business_unit_3/shared/outputs.tf +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -# output "trigger_sa_account_id" { -# description = "Account id of service account cloudbuild." 
-# value = module.artifact_pipeline.trigger_sa_account_id -# } - -# output "cloudbuild_v2_repo_id" { -# description = "Repository ID of cloudbuild repository" -# value = module.artifact_pipeline.cloudbuild_v2_repo_id -# } - -# output "kms_key_id" { -# description = "Projects Key ID for encrytion" -# value = module.artifact_pipeline.kms_key_id -# } - -output "cloudbuild_trigger_id" { - value = module.artifact_publish.cloudbuild_trigger_id -} diff --git a/5-app-infra/1-artifact-publish/business_unit_3/shared/publish_artifacts.tf b/5-app-infra/1-artifact-publish/business_unit_3/shared/publish_artifacts.tf deleted file mode 100644 index 5c20479e..00000000 --- a/5-app-infra/1-artifact-publish/business_unit_3/shared/publish_artifacts.tf +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -module "artifact_publish" { - source = "../../modules/publish_artifacts" - - environment = local.environment - description = "Publish Artifacts for ML Projects" - project_id = local.common_artifacts_project_id - name = local.artifacts_repo_name - format = "DOCKER" - region = var.instance_region - cleanup_policies = [{ - id = "keep-tagged-release" - action = "KEEP" - condition = [ - { - tag_state = "TAGGED", - tag_prefixes = ["release"], - package_name_prefixes = ["webapp", "mobile"] - } - ] - }] -} diff --git a/5-app-infra/1-artifact-publish/business_unit_3/shared/remote.tf b/5-app-infra/1-artifact-publish/business_unit_3/shared/remote.tf deleted file mode 100644 index a9292473..00000000 --- a/5-app-infra/1-artifact-publish/business_unit_3/shared/remote.tf +++ /dev/null @@ -1,30 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -locals { - common_artifacts_project_id = data.terraform_remote_state.projects_shared.outputs.common_artifacts_project_id - service_catalog_project_id = data.terraform_remote_state.projects_shared.outputs.service_catalog_project_id - artifacts_repo_name = data.terraform_remote_state.projects_shared.outputs.artifacts_repo_name -} - -data "terraform_remote_state" "projects_shared" { - backend = "gcs" - - config = { - bucket = var.remote_state_bucket - prefix = "terraform/projects/${local.business_unit}/shared" - } -} diff --git a/5-app-infra/1-artifact-publish/business_unit_3/shared/variables.tf b/5-app-infra/1-artifact-publish/business_unit_3/shared/variables.tf deleted file mode 100644 index 8e9bafd0..00000000 --- a/5-app-infra/1-artifact-publish/business_unit_3/shared/variables.tf +++ /dev/null @@ -1,25 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -variable "instance_region" { - description = "The region where compute instance will be created. A subnetwork must exists in the instance region." - type = string -} - -variable "remote_state_bucket" { - description = "Backend bucket to load remote state information from previous steps." 
- type = string -} diff --git a/5-app-infra/1-artifact-publish/common.auto.example.tfvars b/5-app-infra/1-artifact-publish/common.auto.example.tfvars deleted file mode 100644 index 7f83f05b..00000000 --- a/5-app-infra/1-artifact-publish/common.auto.example.tfvars +++ /dev/null @@ -1,19 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -instance_region = "us-central1" // should be one of the regions used to create network on step 3-networks - -remote_state_bucket = "REMOTE_STATE_BUCKET" diff --git a/5-app-infra/2-artifact-publish-repo/README.md b/5-app-infra/2-artifact-publish-repo/README.md deleted file mode 100644 index 9cdaea98..00000000 --- a/5-app-infra/2-artifact-publish-repo/README.md +++ /dev/null @@ -1,86 +0,0 @@ -# 5-app-infra - -This repo is part of a multi-part guide that shows how to configure and deploy -the example.com reference architecture described in -[Google Cloud security foundations guide](https://cloud.google.com/architecture/security-foundations). The following table lists the parts of the guide. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
0-bootstrapBootstraps a Google Cloud organization, creating all the required resources -and permissions to start using the Cloud Foundation Toolkit (CFT). This -step also configures a CI/CD Pipeline for foundations code in subsequent -stages.
1-orgSets up top-level shared folders, monitoring and networking projects, -organization-level logging, and baseline security settings through -organizational policies.
2-environmentsSets up development, non-production, and production environments within the -Google Cloud organization that you've created.
3-networks-dual-svpcSets up base and restricted shared VPCs with default DNS, NAT (optional), -Private Service networking, VPC service controls, on-premises Dedicated -Interconnect, and baseline firewall rules for each environment. It also sets -up the global DNS hub.
3-networks-hub-and-spokeSets up base and restricted shared VPCs with all the default configuration -found on step 3-networks-dual-svpc, but here the architecture will be based on the -Hub and Spoke network model. It also sets up the global DNS hub
4-projectsSets up a folder structure, projects, and an application infrastructure pipeline for applications, - which are connected as service projects to the shared VPC created in the previous stage.
5-app-infra 02-artifact-publish-repo(this file)Configures a cloud build repository for Docker builds
- -For an overview of the architecture and the parts, see the -[terraform-google-enterprise-genai README](https://github.com/terraform-google-modules/terraform-google-enterprise-genai) -file. - -## Usage - -**Note:** If you are using MacOS, replace `cp -RT` with `cp -R` in the relevant -commands. The `-T` flag is needed for Linux, but causes problems for MacOS. - -### Deploying with Cloud Build - -1. Grab the Artifact Project ID - ```shell - export ARTIFACT_PROJECT_ID=$(terraform -chdir="gcp-projects/business_unit_3/shared" output -raw common_artifacts_project_id) - echo ${ARTIFACT_PROJECT_ID} - ``` - -1. Clone the freshly minted Cloud Source Repository that was created for this project. - ```shell - gcloud source repos clone publish-artifacts --project=${ARTIFACT_PROJECT_ID} - ``` -1. Enter the repo folder and copy over the artifact files from `5-app-infra/2-artifact-publish-repo` folder. - ```shell - cd publish-artifacts - git commit -m "Initialize Repository" --allow-empty - cp -RT ../ml-foundations/5-app-infra/2-artifact-publish-repo/ . - ``` -1. Commit changes and push your main branch to the new repo. - ```shell - git add . - git commit -m 'Build Images' - - git push --set-upstream origin main - ``` -1. Navigate to the project that was output from `${ARTIFACT_PROJECT_ID}` in Google's Cloud Console to view the first run of images being built. diff --git a/5-app-infra/2-artifact-publish-repo/images/tf2-cpu.2-13:0.1/Dockerfile b/5-app-infra/2-artifact-publish-repo/images/tf2-cpu.2-13:0.1/Dockerfile deleted file mode 100644 index 731a8e02..00000000 --- a/5-app-infra/2-artifact-publish-repo/images/tf2-cpu.2-13:0.1/Dockerfile +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -FROM us-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-13:latest -RUN echo "Hello World" > helloworld.txt - diff --git a/5-app-infra/2-artifact-publish-repo/images/tf2-gpu.2-13:0.1/Dockerfile b/5-app-infra/2-artifact-publish-repo/images/tf2-gpu.2-13:0.1/Dockerfile deleted file mode 100644 index d8015e53..00000000 --- a/5-app-infra/2-artifact-publish-repo/images/tf2-gpu.2-13:0.1/Dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -FROM us-docker.pkg.dev/vertex-ai/prediction/tf2-gpu.2-13:latest -RUN echo "Hello World" > helloworld.txt diff --git a/5-app-infra/3-service-catalog/.gitignore b/5-app-infra/3-service-catalog/.gitignore deleted file mode 100644 index fe232a04..00000000 --- a/5-app-infra/3-service-catalog/.gitignore +++ /dev/null @@ -1,58 +0,0 @@ -# OSX leaves these everywhere on SMB shares -._* - -# OSX trash -.DS_Store - -# Python -*.pyc - -# Emacs save files -*~ -\#*\# -.\#* - -# Vim-related files -[._]*.s[a-w][a-z] -[._]s[a-w][a-z] -*.un~ -Session.vim -.netrwhist - -### https://raw.github.com/github/gitignore/90f149de451a5433aebd94d02d11b0e28843a1af/Terraform.gitignore - -# Local .terraform directories -**/.terraform/* - -# .tfstate files -*.tfstate -*.tfstate.* - -# Crash log files -crash.log - -# Ignore any .tfvars files that are generated automatically for each Terraform run. Most -# .tfvars files are managed as part of configuration and so should be included in -# version control. -# -# example.tfvars - -# Ignore override files as they are usually used to override resources locally and so -# are not checked in -override.tf -override.tf.json -*_override.tf -*_override.tf.json -.idea/ -.vscode/ -# Kitchen files -**/inspec.lock -**.gem -**/.kitchen -**/.kitchen.local.yml -**/Gemfile.lock - -credentials.json - -# File to populate env vars used by Docker test runs -.envrc diff --git a/5-app-infra/3-service-catalog/README.md b/5-app-infra/3-service-catalog/README.md deleted file mode 100644 index f52174d2..00000000 --- a/5-app-infra/3-service-catalog/README.md +++ /dev/null @@ -1,298 +0,0 @@ -# 5-app-infra - -This repo is part of a multi-part guide that shows how to configure and deploy -the example.com reference architecture described in -[Google Cloud security foundations guide](https://cloud.google.com/architecture/security-foundations). The following table lists the parts of the guide. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
0-bootstrapBootstraps a Google Cloud organization, creating all the required resources -and permissions to start using the Cloud Foundation Toolkit (CFT). This -step also configures a CI/CD Pipeline for foundations code in subsequent -stages.
1-orgSets up top-level shared folders, monitoring and networking projects, -organization-level logging, and baseline security settings through -organizational policies.
2-environmentsSets up development, non-production, and production environments within the -Google Cloud organization that you've created.
3-networks-dual-svpcSets up base and restricted shared VPCs with default DNS, NAT (optional), -Private Service networking, VPC service controls, on-premises Dedicated -Interconnect, and baseline firewall rules for each environment. It also sets -up the global DNS hub.
3-networks-hub-and-spokeSets up base and restricted shared VPCs with all the default configuration -found on step 3-networks-dual-svpc, but here the architecture will be based on the -Hub and Spoke network model. It also sets up the global DNS hub
4-projectsSets up a folder structure, projects, and an application infrastructure pipeline for applications, - which are connected as service projects to the shared VPC created in the previous stage.
5-app-infra - 3-service-catalog (this file)Deploys Composer and a pipeline
- -For an overview of the architecture and the parts, see the -[terraform-google-enterprise-genai README](https://github.com/terraform-google-modules/terraform-google-enterprise-genai) -file. - -## Purpose(s) -This project has two main purposes: - -1. To deploy a pipeline and a bucket which is linked to a Google Cloud Repository that houses terraform modules for the use in Service Catalog. -Although Service Catalog itself must be manually deployed, the modules which will be used can still be automated. - -2. To deploy infrastructure for operational environments (ie. `non-production` & `production`.) - -The resoning behind utilizing one repository with two deployment methodologies is due to how close interactive (`development`) and operational environments are. - -The repository has the structure (truncated for brevity): - ``` - business_unit_3 - ├── development - ├── non-production - ├── production - modules - ├── bucket - │   ├── README.md - │   ├── data.tf - │   ├── main.tf - │   ├── outputs.tf - │   ├── provider.tf - │   └── variables.tf - ├── composer - │   ├── README.md - │   ├── data.tf - │   ├── iam.roles.tf - │   ├── iam.users.tf - │   ├── locals.tf - │   ├── main.tf - │   ├── outputs.tf - │   ├── provider.tf - │   ├── terraform.tfvars.example - │   ├── variables.tf - │   └── vpc.tf - ├── cryptography - │   ├── README.md - │   ├── crypto_key - │   │   ├── main.tf - │   │   ├── outputs.tf - │   │   └── variables.tf - │   └── key_ring - │   ├── main.tf - │   ├── outputs.tf - │   └── variables.tf - ``` -Each folder under `modules` represents a terraform module. -When there is a change in any of the terraform module folders, the pipeline will find whichever module has been changed since the last push, `tar.gz` that file and place it in a bucket for Service Catalog to access. - -This pipeline is listening to the `main` branch of this repository for changes in order for the modules to be uploaded to service catalog. 
- -The pipeline also listens for changes made to `plan`, `development`, `non-production` & `production` branches, this is used for deploying infrastructure to each project. - - -The pipeline can be accessed by navigating to the project name created in step-4: - -```bash -terraform -chdir="../terraform-google-enterprise-genai/4-projects/business_unit_3/shared/" output -raw service_catalog_project_id -``` -## Prerequisites - -1. 0-bootstrap executed successfully. -1. 1-org executed successfully. -1. 2-environments executed successfully. -1. 3-networks executed successfully. -1. 4-projects executed successfully. - -### Troubleshooting - -Please refer to [troubleshooting](../docs/TROUBLESHOOTING.md) if you run into issues during this step. - -## Usage - -**Note:** If you are using MacOS, replace `cp -RT` with `cp -R` in the relevant -commands. The `-T` flag is needed for Linux, but causes problems for MacOS. - -### Deploying with Cloud Build - -1. Clone the `bu3-service-catalog` repo. - - ```bash - gcloud source repos clone bu3-service-catalog --project=${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. Navigate into the repo, change to non-main branch and copy contents of foundation to new repo. - All subsequent steps assume you are running them from the bu3-service-catalog directory. - If you run them from another directory, adjust your copy paths accordingly. - - ```bash - cd bu3-service-catalog - git checkout -b plan - - cp -RT ../terraform-google-enterprise-genai/5-app-infra/3-service-catalog/ . - cp ../terraform-google-enterprise-genai/build/cloudbuild-tf-* . - cp ../terraform-google-enterprise-genai/build/tf-wrapper.sh . - chmod 755 ./tf-wrapper.sh - ``` - -1. Rename `common.auto.example.tfvars` to `common.auto.tfvars`. - - ```bash - mv common.auto.example.tfvars common.auto.tfvars - ``` - -1. Update the file with values from your environment and 0-bootstrap. 
See any of the business unit 1 envs folders [README.md](./business_unit_1/production/README.md) files for additional information on the values in the `common.auto.tfvars` file. - - ```bash - export remote_state_bucket=$(terraform -chdir="../terraform-google-enterprise-genai/0-bootstrap/" output -raw projects_gcs_bucket_tfstate) - echo "remote_state_bucket = ${remote_state_bucket}" - sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. - - ```bash - export backend_bucket=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-service-catalog"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` - -1. Commit changes. - - ```bash - git add . - git commit -m 'Initialize repo' - ``` - -1. Push your plan branch to trigger a plan for all environments. Because the - _plan_ branch is not a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), pushing your _plan_ - branch triggers _terraform plan_ but not _terraform apply_. Review the plan output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ```bash - git push --set-upstream origin plan - ``` - -1. Merge changes to shared. Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), - pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ```bash - git checkout -b shared - git push origin shared - ``` - -1. `cd` out of the `bu3-service-catalog` repository. - ```bash - cd .. - ``` - -### Run Terraform locally - -1. 
The next instructions assume that you are at the same level of the `terraform-google-enterprise-genai` folder. Change into `5-app-infra` folder, copy the Terraform wrapper script and ensure it can be executed. - - ```bash - cd terraform-google-enterprise-genai/5-app-infra/projects/service-catalog - cp ../../../build/tf-wrapper.sh . - chmod 755 ./tf-wrapper.sh - ``` - -1. Rename `common.auto.example.tfvars` files to `common.auto.tfvars`. - - ```bash - mv common.auto.example.tfvars common.auto.tfvars - ``` - -1. Update `common.auto.tfvars` file with values from your environment. -1. Use `terraform output` to get the project backend bucket value from 0-bootstrap. - - ```bash - export remote_state_bucket=$(terraform -chdir="../0-bootstrap/" output -raw projects_gcs_bucket_tfstate) - echo "remote_state_bucket = ${remote_state_bucket}" - sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars - ``` - -1. Provide the user that will be running `./tf-wrapper.sh` the Service Account Token Creator role to the bu3 Terraform service account. -1. Provide the user permissions to run the terraform locally with the `serviceAccountTokenCreator` permission. - - ```bash - member="user:$(gcloud auth list --filter="status=ACTIVE" --format="value(account)")" - echo ${member} - - project_id=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${project_id} - - terraform_sa=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-service-catalog"' --raw-output) - echo ${terraform_sa} - - gcloud iam service-accounts add-iam-policy-binding ${terraform_sa} --project ${project_id} --member="${member}" --role="roles/iam.serviceAccountTokenCreator" - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. 
- - ```bash - export backend_bucket=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-service-catalog"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` - -We will now deploy each of our environments (development/production/non-production) using this script. -When using Cloud Build or Jenkins as your CI/CD tool, each environment corresponds to a branch in the repository for the `5-app-infra` step. Only the corresponding environment is applied. - -To use the `validate` option of the `tf-wrapper.sh` script, please follow the [instructions](https://cloud.google.com/docs/terraform/policy-validation/validate-policies#install) to install the terraform-tools component. - -1. Use `terraform output` to get the Infra Pipeline Project ID from 4-projects output. - - ```bash - export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${INFRA_PIPELINE_PROJECT_ID} - - export GOOGLE_IMPERSONATE_SERVICE_ACCOUNT=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-service-catalog"' --raw-output) - echo ${GOOGLE_IMPERSONATE_SERVICE_ACCOUNT} - ``` - -1. Run `init` and `plan` and review output for environment shared (common). - - ```bash - ./tf-wrapper.sh init shared - ./tf-wrapper.sh plan shared - ``` - -1. Run `validate` and check for violations. - - ```bash - ./tf-wrapper.sh validate shared $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. Run `apply` shared. - - ```bash - ./tf-wrapper.sh apply shared - ``` - - -If you received any errors or made any changes to the Terraform config or `common.auto.tfvars` you must re-run `./tf-wrapper.sh plan ` before running `./tf-wrapper.sh apply `. 
- -After executing this stage, unset the `GOOGLE_IMPERSONATE_SERVICE_ACCOUNT` environment variable. - -```bash -unset GOOGLE_IMPERSONATE_SERVICE_ACCOUNT -``` - diff --git a/5-app-infra/3-service-catalog/business_unit_3/shared/README.md b/5-app-infra/3-service-catalog/business_unit_3/shared/README.md deleted file mode 100644 index 849fc76c..00000000 --- a/5-app-infra/3-service-catalog/business_unit_3/shared/README.md +++ /dev/null @@ -1,16 +0,0 @@ - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| instance\_region | The region where compute instance will be created. A subnetwork must exists in the instance region. | `string` | n/a | yes | -| remote\_state\_bucket | Backend bucket to load remote state information from previous steps. | `string` | n/a | yes | - -## Outputs - -| Name | Description | -|------|-------------| -| cloudbuild\_trigger\_id | Id of Cloud Build Trigger | -| storage\_bucket\_name | Name of storage bucket created | - - diff --git a/5-app-infra/3-service-catalog/business_unit_3/shared/backend.tf b/5-app-infra/3-service-catalog/business_unit_3/shared/backend.tf deleted file mode 100644 index 023a3c07..00000000 --- a/5-app-infra/3-service-catalog/business_unit_3/shared/backend.tf +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -terraform { - backend "gcs" { - bucket = "UPDATE_APP_INFRA_BUCKET" - prefix = "terraform/app-infra/business_unit_3/shared" - } -} diff --git a/5-app-infra/3-service-catalog/business_unit_3/shared/locals.tf b/5-app-infra/3-service-catalog/business_unit_3/shared/locals.tf deleted file mode 100644 index e4a9236a..00000000 --- a/5-app-infra/3-service-catalog/business_unit_3/shared/locals.tf +++ /dev/null @@ -1,21 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -locals { - business_unit = "business_unit_3" - environment = "common" -} - diff --git a/5-app-infra/3-service-catalog/business_unit_3/shared/outputs.tf b/5-app-infra/3-service-catalog/business_unit_3/shared/outputs.tf deleted file mode 100644 index 22d794b2..00000000 --- a/5-app-infra/3-service-catalog/business_unit_3/shared/outputs.tf +++ /dev/null @@ -1,25 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -output "storage_bucket_name" { - description = "Name of storage bucket created" - value = module.service_catalog.storage_bucket_name -} - -output "cloudbuild_trigger_id" { - description = "Id of Cloud Build Trigger" - value = module.service_catalog.cloudbuild_trigger_id -} diff --git a/5-app-infra/3-service-catalog/business_unit_3/shared/remote.tf b/5-app-infra/3-service-catalog/business_unit_3/shared/remote.tf deleted file mode 100644 index 21072be3..00000000 --- a/5-app-infra/3-service-catalog/business_unit_3/shared/remote.tf +++ /dev/null @@ -1,39 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -locals { - service_catalog_project_id = data.terraform_remote_state.projects_shared.outputs.service_catalog_project_id - service_catalog_repo_name = data.terraform_remote_state.projects_shared.outputs.service_catalog_repo_name - machine_learning_project_number = data.terraform_remote_state.machine_learning_development.outputs.machine_learning_project_number - tf_service_catalog_sa_email = data.terraform_remote_state.projects_shared.outputs.terraform_service_accounts["bu3-service-catalog"] -} - -data "terraform_remote_state" "projects_shared" { - backend = "gcs" - - config = { - bucket = var.remote_state_bucket - prefix = "terraform/projects/${local.business_unit}/shared" - } -} - -data "terraform_remote_state" "machine_learning_development" { - backend = "gcs" - config = { - bucket = var.remote_state_bucket - prefix = "terraform/projects/${local.business_unit}/development" - } -} diff --git a/5-app-infra/3-service-catalog/business_unit_3/shared/service_catalog.tf b/5-app-infra/3-service-catalog/business_unit_3/shared/service_catalog.tf deleted file mode 100644 index 9686b40e..00000000 --- a/5-app-infra/3-service-catalog/business_unit_3/shared/service_catalog.tf +++ /dev/null @@ -1,25 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -module "service_catalog" { - source = "../../modules/svc_ctlg" - - project_id = local.service_catalog_project_id - region = var.instance_region - name = local.service_catalog_repo_name - machine_learning_project_number = local.machine_learning_project_number - tf_service_catalog_sa_email = local.tf_service_catalog_sa_email -} diff --git a/5-app-infra/3-service-catalog/business_unit_3/shared/variables.tf b/5-app-infra/3-service-catalog/business_unit_3/shared/variables.tf deleted file mode 100644 index 8e9bafd0..00000000 --- a/5-app-infra/3-service-catalog/business_unit_3/shared/variables.tf +++ /dev/null @@ -1,25 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -variable "instance_region" { - description = "The region where compute instance will be created. A subnetwork must exists in the instance region." - type = string -} - -variable "remote_state_bucket" { - description = "Backend bucket to load remote state information from previous steps." 
- type = string -} diff --git a/5-app-infra/3-service-catalog/common.auto.example.tfvars b/5-app-infra/3-service-catalog/common.auto.example.tfvars deleted file mode 100644 index 972da35e..00000000 --- a/5-app-infra/3-service-catalog/common.auto.example.tfvars +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -instance_region = "us-central1" // should be one of the regions used to create network on step 3-networks - -remote_state_bucket = "REMOTE_STATE_BUCKET" - -# github_api_token = "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" - -# github_app_installation_id = "18685983" - -# github_remote_uri = "https://github.com/badal-io/ml-foundations-tf-modules.git" - diff --git a/5-app-infra/4-service-catalog-repo/README.md b/5-app-infra/4-service-catalog-repo/README.md deleted file mode 100644 index a7e93409..00000000 --- a/5-app-infra/4-service-catalog-repo/README.md +++ /dev/null @@ -1,164 +0,0 @@ -# The Service Catalog - -## Overview - -This repo provides a number of the [Google Service Catalog](https://cloud.google.com/service-catalog) Terraform-based solution modules: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
artifact_registryArtifact Registry is the next generation of Container Registry. Store, manage, and secure your build artifacts.
bigqueryBigQuery is a serverless and cost-effective enterprise data warehouse that works across clouds and scales with your data. Use built-in ML/AI and BI for insights at scale.
bucketCloud Storage is a managed service for storing unstructured data.
composerCloud Composer is a fully managed workflow orchestration service built on Apache Airflow.
metadataVertex ML Metadata is a service that provides capabilities for managing the lifecycle of metadata consumed and produced by machine-learning (ML) workflows.
notebookVertex AI Workbench is a Jupyter notebook-based development environment for the entire data science workflow.
pubsubPub/Sub is an asynchronous and scalable messaging service that decouples services producing messages from services processing those messages.
secretsSecret Manager lets you store, manage, and access secrets as binary blobs or text strings. With the appropriate permissions, you can view the contents of the secret.
tensorboardVertex AI TensorBoard is an enterprise-ready managed service for machine learning experiment visualization.
- -## Deploying with Cloud Build - -1. Grab the Service Catalogs ID - ```shell - export SERVICE_CATALOG_PROJECT_ID=$(terraform -chdir="gcp-projects/business_unit_3/shared" output -raw service_catalog_project_id) - echo ${SERVICE_CATALOG_PROJECT_ID} - ``` - -1. Clone the freshly minted Cloud Source Repository that was created for this project. - ```shell - gcloud source repos clone service-catalog --project=${SERVICE_CATALOG_PROJECT_ID} - ``` -1. Enter the repo folder and copy over the service catalogs files from `5-app-infra/4-service-catalog-repo` folder. - ```shell - cd service-catalog - cp -RT ../terraform-google-enterprise-genai/5-app-infra/4-service-catalog-repo/ . - ``` - -1. Commit changes and push main branch to the new repo. - ```shell - git add . - git commit -m 'Initialize Service Catalog Build Repo' - - git push --set-upstream origin main - ``` - -1. Navigate to the project that was output from `${ARTIFACT_PROJECT_ID}` in Google's Cloud Console to view the first run of images being built. - - -## Building with Cloud Build - -The Service Catalog solutions are built and published as artifacts to the Google Cloud Storage bucket with the Cloud Build CI/CD pipeline, in the `service-catalog` project in the `common`` environment, as per the diagram below: - -![Service Catalog Workflow](img/workflow.png "Service Catalog Workflow") - -The build is triggered by code push into the `service-catalog` Git (or Cloud Source Repositories). The pipeline builds the solution artifacts (one per module) and copies them into the Cloud Storage Bucket. - -Once the modules are copied to the bucket, they can be used to create Terraform-based Solutions on the Service Catalog Admin page, see [Creating the configuration in Service Catalog](https://cloud.google.com/service-catalog/docs/terraform-configuration#create_config). 
- -The Solutions then can be added to one or more [Service Catalogs](https://cloud.google.com/service-catalog/docs/create-catalog) and then [shared](https://cloud.google.com/service-catalog/docs/share-catalog) with the other projects to be used with the Service Catalog solution browser. - -## Usage - -### Google APIs - -For the Service Catalog to be able to deploy the solution with the Cloud Build the following Google Cloud APIs should be enabled on the project: - -```bash -aiplatform.googleapis.com -artifactregistry.googleapis.com -bigquery.googleapis.com -cloudkms.googleapis.com -composer.googleapis.com -compute.googleapis.com -container.googleapis.com -containerregistry.googleapis.com -dataflow.googleapis.com -dataform.googleapis.com -notebooks.googleapis.com -pubsub.googleapis.com -secretmanager.googleapis.com -storage.googleapis.com -``` - -### IAM Roles - -To launch a solution using Service Catalog, user must have the following IAM roles assigned: - -```bash -roles/cloudbuild.builds.editor -roles/storage.admin -roles/viewer -``` - -As well as the Cloud Build Service Agent `PROJECT_NUMBER@cloudbuild.gserviceaccount.com` show be granted the following IAM roles: - -```bash -roles/aiplatform.admin -roles/artifactregistry.admin -roles/bigquery.admin -roles/cloudbuild.connectionAdmin -roles/composer.admin -roles/compute.admin -roles/compute.instanceAdmin.v1 -roles/compute.networkAdmin -roles/iam.roleAdmin -roles/iam.serviceAccountAdmin -roles/iam.serviceAccountUser -roles/notebooks.admin -roles/pubsub.admin -roles/resourcemanager.projectIamAdmin -roles/secretmanager.admin -roles/serviceusage.serviceUsageConsumer -roles/storage.admin -``` - -Most of the Services deployed with the Service Catalog requires access to KMS service, to be able to encrypt data with the Customer Managed Encryption Key (CMEK), created as part of the project onboarding process. 
Hence, the Service Agent of the corresponding service requires the IAM role `roles/cloudkms.cryptoKeyEncrypterDecrypter` on the project's KMS key. - -### Launch a Solution - -In Service Catalog, you see a union of solutions from all the catalogs you have access to in your Google Cloud hierarchy. - -To view available solutions, go to the Google Cloud console Service Catalog page. Solutions that have been shared with you appear in the main panel, see [Launch a Terraform configuration](https://cloud.google.com/service-catalog/docs/view-and-launch#launch_terraform) for more information. - -## Resources - -* [Service Catalog](https://cloud.google.com/service-catalog/docs) -* [Cloud Build](https://cloud.google.com/build/docs) -* [Cloud Storage](https://cloud.google.com/storage/docs) -* [Artifact Registry](https://cloud.google.com/artifact-registry/docs) -* [Big Query](https://cloud.google.com/bigquery/docs) -* [Cloud Composer](https://cloud.google.com/composer/docs) -* [Vertex ML Metadata](https://cloud.google.com/vertex-ai/docs/ml-metadata/introduction) -* [Vertex AI Workbench](https://cloud.google.com/vertex-ai/docs/workbench/introduction) -* [Pub/Sub](https://cloud.google.com/pubsub/docs) -* [Secret Manager](https://cloud.google.com/secret-manager/docs) -* [Vertex AI TensorBoard](https://cloud.google.com/vertex-ai/docs/experiments/tensorboard-introduction) diff --git a/5-app-infra/4-service-catalog-repo/img/workflow.png b/5-app-infra/4-service-catalog-repo/img/workflow.png deleted file mode 100644 index eaafbf50e1e8f01002cf680878d02d406c6dfd00..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 87776 zcmb@ucUY6b(>5A{^rjR+dPgaO^j<}}AiWn6kdD$JVkm-u^d=xpr1vfzK|w$TX#o@n z3L-u9fRwX|{@(X1=X}>W*LD8V@Z{N@ot>SXd*+@*>)ll&Bc>;YKpbLbF5L_4p z0=-R$5B{Q~$!!e&!SU8tQ-)N3VO)hkSRop>l?(%{H*yGG8mb;1jx62<dL>N-LR_ z*H+Q6r;=0EhonWsDeN9S|91h9 z_ALJ`%zxgIG{k}CsW7BZzWy&c|MTcFdYa%r|NrmRkQ>2!Rz@Qon*R4km*fwz3xhwS 
z`8Yh>Q0pri_g_*7X@&m&{jfWR8+FV@C5WBuWg!GEC?SD()oaH4Uuv?|z`K9{engMc zU~$1cjOIdns98~X#k4iW{{*~DPJ9{NTaad^26WP^j03eX6&2yr_@`@DR-gc!!4I)z z7}ZY!VV&lh3mpEl2cPu8AJTXEGcbbL)&i}E<2m^K=P6@8By->|cCkseiIdiiy z1~cnrFl!f_>nGWN%~}Iy-O>3(tPdXEB&NfGWmCks$(#OedC=E(2mM4q@Xdei&5#XP z@3HF_O6>9+UMrUNNVqx_d@`qb>HM@^SlOJ1z>ng;RHcYFq>K5!^XFu&U=HP&@1H(B z4?eWy6F$2ebUf5(mnrp6`JtDArk}Agdlq4)Gt~&ygA3Aa=T`rz-Mc-++qkzhm{RY# zATQ;&@zSu`(u`irg;UIB;=2D#0*k^;r!TLPTqetJ$b{)QjpP~~NCoYe8djR#qnGgD zH>xle__8%xppu-L%9+M%MBGeRbM)?Xr}RgU|C*S zA=#)fsz`s|-;eLGkSiC=DKBsBf~aPiIP3U#vY=o$Xx}<^gkv{S15X)8-hbPAcouK$ zwO{B;P%(2#*v|n?m_%iTx@CKOfh##YDpBNV&!6w^@poOE z|Jhxy=^j$0wc^CZCnYS;RXE#E`IIf8J4a$WG&p##+a_S~nXng9@!-$G#kv3LOhbuf z&}xxV+Twa~-qUllUn6;fBeHv+q%s6-x-5Tv%JlxS0(YHgdGhBF`ul9;={bje|BEDU zoh)=Cg;#X*`gln~SbuM?a^T`K+NFsygK`gAORvzy1N+FoHi0z5fwBbSWA>$ojrbA? zEVm%WUbnXQZcdQ*mo2y4G|WOZ#tGW_>@+ z#pquVkJJ#HtNfRruoiAx41EV-@moG!S>Zx#k-20DWHidIH}EFed`hY`V@W64&yjy( zp8NDXFik?%1_fUg_5P)e_k=JC_PZczVjwd!Q#kn0ZKlpXYB!7RHl=kLIg>Q?LR2vF z*6nF_TM}m803s?Xs!1{Q>A0>!S~vkk`JdpUEdhm7R5&qP=UREidnPop?&SQ>HeT+n z2xs=&SDM8==f4+vpvu`&cis`O@GoZGez_FI5^~*lZQ`DIdl34uw|5%CeY{u)n&8wI zN#-zJYxjXIzoz@VZ5O}yXLMumkw;Nqj{khw8eRh9`BOB?PUG&~^tH)~;c}x2SP;5D zfu*K;TqF3rKc0z?aL4BWg|pUmc4#rv^`xCx#eJ$WlayK3bTnkUV=q2TwvlSB$!AsH zfuh&$&(U^`TffsN9Wqw9py7})}24>2O`HP>xNvnvSQNxuVGMZJT$f<;2nh zB2^)ck)a{|ym!NeDLlWmayx|{h~Y@Gz)+z>D`h3Ql&1x|=@4bXF7)f&JFPsMI_BLW zou6Lb`uS>GOq4z0>G@vX2e$NT+Nl~Fo2|C>YHv7&BGmSgZp)lswy67Ti1AioU+)f0?8jgst2MMqqwA{&Cd^8x^D!up?}001!hZlrmv2dRBYp9 zT$~`f4RatSU=xkjSs!)RB;GYasyv0hWs>w#Y{H!~a6#e{ay*Tgg!YaKB9m0#a(BF} zrD1!6W>R2~@JMwSPQS5joxm)Y+Hx2x*rm6i^wTEfShb3x103v0`ghLb#e?Yr7KWYq zJfZ>XRV@=2I6*7!P*U>jp@2V%co}tY)49t*h||h8K{(}iF;icuT;^L z@A7rj2gW?MuVo?+RDRh8u4P!H`PD20mw#&gHH@TkVw1FBqlt1OFh8mBMICJ?7fhF1pVSvv(ik#N-BVNi{8M4yOyykV5z& z!s@7(IB5Al8zOirsCYJ=Okq`}XFSMtL`@!)l{4bL-|Cnu4le{RyS9fMajAx_;}Oah z#}CFu27?ruSO*qGL{Tw!L{sq$i6JZthxYMc5dKPpH=bT)fy%dIsC;Eh-+KtNcEH!f z%PuiNZPkn6YLC|@%7zLF;u0Ybeup1@_f8m_7JEZg`+Qd|fw`(9UswM4O&dk73Mc%= 
zhBEr*tU4~su5$L;Y3(6}9?>7>;ElQ;;&+%w@#m*5{&ZiEtgk@$bg|dt|8!G^LTn!N z-wE~CEg6vLyrHvhoO1s80xs3VoIfJ=E+w3a+n1L_kfrzZD!*xdhbe^hjqmytO?b{t zhJ{7xfG7wa9QRF*jQ>#kc2`A6REt(rRGWu5ln&=M#4)ipo}3lRs?__77eyBc?eg24 z9jvJ7DzEN4f~dl&LoD5={Gj4EIi}u(f_XJ>LMasSyq@{r!=>p}$P4%peP6dg5E*_G zl?v4^eEm9R{ZbVS#|Bs|CmWlR6-5nuhZ9^kVE4O(JI-Z}B`UVU-&BMZ6Prj9^yQ%A zjfRn2s_IyAC1n{XIx&j;+vIT^y_lvg2W6wO1q7ZXfAX7yW-8_&^8fyf;-OjJb**`3 z=)G17$7fD^$IV@6>za1!ICTG_B^as zA)p~qlo9Fgru=$QJ2)2AN}lmvTA_y&Uayla?m|YFJTKZ$g~A~PzC)eQpW-?zQR1&$ zWZCWOKjI|Pzi;3p&cGdJZ2M&4$q`M>VqYC8{B944vTo977wUzxV1^9J&YfXu1LyI9 zBj2n_%%8e>a%=skD$;Z7NMaa7Mv;MXAMzeY2q&W`5RFuY0dEy6m)6D@AbM~Hk%u%! z2Fw+sdZ5a_e>f~@tnYAw*pm4{CFUlJ5i|VzLW9o2&vjt2;#HRCAYfNzW zS>33zqmZU}OR>Q_neyZ~@Tg!MEjYO%d+%jAoQ30+JR4FO>DyPt-?c{w=?CO|r&3n0 z`=yOrd2Su$yms(pwl7NTF=1xMVkAS1M4plPe>f~D8`t1+S0G(Zn_rHc7IB?wfj=JMKdxYOq|h!;F={w>$z0pr1- zt%O>=CHsjT(yf)=cUKWj;nzvLBARTbWiRzWY;oV*|IA3)LpslbATvfrD>LQOz%MIC z1Yda^9Wj!Kx|ZCcV=)ig)4|gn(;Xw)%-XGekU zqtSe&dT_+<3yJskMKwL1CL~ad$q~cJfVgioAFn7}nCTlBq&+e9jPi{oyzsH=i%PU` zawoOufq<>bR?2&vJwR?gP)oskqm zKgcbg2`cclW>3+YP!iPiwL2M_cLp^P-s{`==Y#M~qanZ;Rg{4APW ziv0Q}tmQh7<8G3$RX_}EdX9QS(KT|Hv3J8_D?RKO)}0OD|| zA}%uu48XAtwS|wQWQ@o)j;GXuIItk{aS*TC_EjR_puA^sS3(4;aV?SB5{lH2NjWwO zH~z3%Ek&lOEjFA7NcUcr+f4C9iM_W7X0?7-dE;n81jSsZ_~$p?o2W(Pz!?LOUJ3F9 zqu=Bn8ce?k>I*KDu>FeYYarNWQ?~YfV!0Q_Jc?&6>d8t!)T1<0OV$&0X@tdRLXcQb zUOaSX!lTW-PQIaXgQUo#0}H5-)BpzEb=H^yQF&;s2`e2k{9(~Ezo1KX!iQUFPWEfQ zKG&)A@YLT?P{xP!JR<++92N04$^itb7Eo))!Ws`u$osKhu2Dp=Aa1F@akq3Q`q@mf z*m@u3LfSL`8d(N98Ot98u1lAdW5mKu)uK?hxiQfX2J!hBgFtj;s&Eq<8_{BLfv&Z@ z=Z~#`C7BfTBl>y5#u zhyv&xhz;FPKd%AK-!Z)!L5QLzA;_l&4wED}Oe7^!j#gOxf$yVBfI<~=0x%xOuxA(e zAO3~gqKSWl^-{%F;EehF2hCWqhtCZNmljw3Ded25cn-)q%(Nanl)=8O3cyG^F6O^j zFpR9mq#nXB-$h==WE-5-jj<>U6Mu$+nMkQq%&b~y%`_G4G2wNrbW3zb&Z@#z)jt9l{Crc7wUC(8(0Q_8;+rRG85asU>u z{)5GK6_8Ltx9nV+lD{KS0WmYL%g16f4iPB;u6wz-PyN$QUI8T;tk~ojsZjWi z2Y(mQlZ?^y48JHA2O(v_3ct7-Og;arfJzL|KXJcJg59NfJ@D#t#|K>hUUdYox=q=N z{!LAofLHz8rDp%-5dLSgW&qpWaL@j$;XhM&0Zd^)l#35Us}P#nX7I_ig`(=8Uj9cj 
z(!&|Pfqa1c7F4P@F|zpjsjtgD3H>wax89Av7X%POqW=VCtOA05{X9H`-Nwp7I8~_U z{F*EmszXfm;zSc7A7^vv)Ljb|UzH?=DM*On`#n$V<0BJML2>)$85!%v@!i%7mLE-w8>oj*40ppsNt}@HOKeJ2C z0GqI@pS27&6!=6-oDxVL--ff-pYe7x`A}Vn&XVB14vX58zs9214^%m1YU_xNM)RSn z#F98@WI?dAQha$rMaCBoU19vzgnWJ+1X#!Y3*%`a8#NLwhA&(Kir&YzUz11Rxg?G{JDCW zc|&#My1e4+*QXY`t%p%~lzE2t%zf7w5(T7YGVA@n1vyVut0^As+{Y{feh+Bmg?{oe zR)3{%UAzr)SoOTz(LoSIzB7Hq>XIiiDmW2Vb=G$j*eX@O5xYwK$pzubTc&n6SWL5^ zS#_cVjim7Zc9%)v4e}PZ_|4z**?vEsS6WN!(zmR!$C>+q*ph#yaaU$gGI)rmbn|bM7O#SrFE@3xp{gHdW=2UX#BpeN#5o)tgfz?5Aw>OURu$}!T-FBUhJ%Q_)asEn;IGeaHGuck10Ncd8f_8ZFohUl)b^6d!w|V=}0% zAh7=Q++28kMBUA@qjA%(&am3J{r15Wf3~31jM-S>1q%{ieXi|XZFw?G!hKHHW@x67 zijszj8jwP{#$iBV@(T_^*--J zmp?8-uRoPu$s)l|NLC4Mz8d8N9i>d>1AGFZNFUfy~RFd_jU8Oykb1l z_bS`k?fgZ}6!iNTV!}>mat1z=3N9UU|Fk&1t2bidW3QPOdzDA(B`vP;%+qD?ns3ik9A+}MwYG;njOVqc+aAmZ6YDLW-F{M+ zL?)ZpU48n61GPEZp&7WJc>gC=n&YIagr4<#rtEYQ2o=#XWE$d99%A$4I$2heNgFQO zWzzM>y*=xU`ATKadlC^J*&D_9_9}Vg!b`X z6$_kK)Xf`MY5O%oi^5UATs+-zcKksOnp;VwMxkDqp7Y>aWL}!T7LS{9n3pz$rNaY4 z{icYdxMJP^yiIx5m*x7TD?$*zYt90p`ApL&t%ZVUUgV9<)hFyt45BG7Un)$Y*(QW< z%GbT^Xi|W&nJ(Z9zPt;`$?M83kc}Z^(2?uVty0h~H#C}ACx>Mlhpg|=gUrjR-uCDRt&CSN3uKK*n7v%h}9CV8}-m- zB+o+-F{*!hGc8ZA@5m-4jGa%TMcYHs%RxdCyR|m4F5DCt z4eh%7MSbP0hy!6z#V*dQYt0B~=%W+A z5~r01b`;;qN&O)PGu4V3)l#H#Q<}T%NA&zdd>-mVb)a9v`fhv}A7aUQ{OVa}dlUs* zool)Py7MO9kP5NxYw7gGBQe;#=)g($Cc{%>@jC1eMy+jqUrBSqE?QC$l28@F;k|`M&3Xtw~PnfM9mc{KX+Q z{BSFWr@$$6kRkodehd7Ymwmsl(65#G?>9Q9jJf@ttTq%x+YF+VI@lVWq!JDfB(EZa zqPv~B3Q4iiT79&)62#LevyPQ4ANmVZ8Z>#DeitJsAp_nkC~Rvq>LTM;nfJo1vQI52XiE=Z(pL_93%eWAv2kw& zh^ZSiwMNOY5nmsT7d@(=)zW{3zv>tgq#oW*eKw3eNkIAevKYu0HN^i9xV?|=DaoGn zjk;^%$=ne#`?Ew2zCnx~6pS@w0I+dj`R95dGRR+5!c#&8Uu35l{XQI>F)> zehgw`CsPu^%5Qi-9J>1-E&yvK24)(0Hvkg?v+iNEG9fQ5s6*VKn~7lMpGhL7?l(>f zdpVR|`K_z7{XySs+T-6n@`LIMd_>V2V?ssIqV8YKv&20eZe&Zz3rsS`-|Da2(Lj1@ z)81Fb&Nl@T5+?OeUXLBAr*P|3&OL1WVoXd;9kVgtWU!l}&+l;b;~B})`f-E9R}=5$ zrRWK7w+e%n-2FCNI4Ps(?Gu}zA}+TTbmMIi$st0K0iMv2dsfkD1sJ4iY66gX=tx`V 
zxK#S)cV{N#@JIEG=$bQ&`M@(gP`%?dS{gDcA3}ZWA1tjq(o8F#)avZw7X)i8YFo^1 zG5eX)L)-NvUJSdDsTE+Spf;_3Z-oR%sfJh(n^yxq>(fh!=sYWoq8Q4qk>%Oe4dJk5 zxbxx~&yhSjqHI(RN%D3kAe@ExrI58Qh=bpg zHI2i=8-Gl=Ww~m)_Y?B>SR~GGwO$mIgmo!QFc85Cct8G;yJ0(p=0w#gU=JV2QUHVI zK&5G^k?bjd%F=5aiWdt`kn<{GlzaTq-qq*#s1hjaa3>`tjWsyw&b)o}QwqV>vDa$C zo!MNR&_nwRx9ml^CuYR7d*Q+s81n?qp$ud%dIoOYI|Nb}_*BiP9L1-P$meb~)qiVt zPjUPl)2o96>ptC$9dM1B6Vl8LUY@I`{j5TCN#5XrKditpP8TF8+OP6qL*&{>jMDPN zf?hXN3Arkl8O_x@cfM%z$RC)KeDry`&VG5bWg)$?l0P~j;fM%k8YLO{%cPib1|)V4 zC2v*uVMj9)`0^NQ)pp>{2XZ@F&8Mzqi4{zwSqUB4AMY_q1Xc!HTE2QE`AGS2W2UfI z%A(nAl?t6W=(-)kpJ92c@p}_kWBZcby?pGRUY(0g!@8rF;Ls%WIE4uMbm2MbTyLtE z472MMNeG}0nr(CzhM~PyLE-DuE2(ioWbp6kmF2^Bmg`56K;{2$#;WN;H6jNLc&}ei z6mk0UaesB325nSssNp!0D^RGGkUn0b8{9!KAc8FEpbD+&prJ_E{Y0O4sSEYnrjiSL z;C;Z2eo1~@#*U88b}FKNy6>nzR(NqWN{-jc)b#hx9>~vv+L(oG$Pp@q>cJw!&WQsG^b*J_=XBh7n3eG2>DTxxy&X zqz`2s|B+6Luxn16*9uTA#oBUH@am5mhtoWdq(%dt@anskmK_}@TiUH&J^A^2gv3m$Dt03ii?SJx0_rM} zAGTq>$l&DW#)H+|lb|Dl{ahH~UHlv#HyewnvwO39$krNk>LX{}#wE|0uux||F!7a^ z*2*VI)N|g_Av-&pFP2s?eQT~=3uw(@Bv)?t>}WyD&#&3A%G~H?`-ZTFrl!6tpKgwH z5$hE-msfJZ#y4A+D9bEbs)wA;Pmf(>-`PLkzS)JoFBO2w=eK;Tw`^3TNz;nzB`I?J zoV7L`SJMR`{n!|$B6)JK&U3JikSK3;OG&^SbEoW^$|_jvKg8s`8Budms|jScZm#P! 
zqjqNzQbVYSX~N8-{0rBo!otE65tkE-sNmZ%eARj>pFF$Ij6;!42y0jMf%Dapyi~-G zNCpo{o^F;9v{JLjTse1}y5t2Jm2Cj0ZC}@uKNEJyyXwG$$T<`I&KBM=%9LOA?e3Lv zovh|V%n9G|pbw0iC{J;7@do52h-6Q0u=&2pHfwBfG5@dt&eMW`KM)s?t1<4u*K?gH zO%in;do$e-rT2+K;Z!dHa8OPlXHH#qwSNA?>JN7z_B zLA9RvcXituZ_`hggSE*bK;Egp|A81UTqTn9Ufxetl3gX152mWu;N4!D7^i9@c_92Z zV{7{wqz+NgF2Dk~k?<{FDg4cyIQqx?zwaII4hyCca;U}gOCb9$kuVcz>t~C*m&;z( z%D8DnHFdA>&R~BWgVA$_cmgkFVOQlHR_5j-Q7V<vXHf zK!G!bO{dEh;RD@sP`jmp`$IerT_?+pEO&r5%c6ODdxgGEFn9GuQSeI2w$ zUcpP;r7sQd??y)qQgRm{Y@T+8X=sZs-gq7xYmf`ocAS%gz4WR!YbbsnR0Nb+O60oo zC1-N!s#6c!Dd{m3@`&PuuXpnDeB`!e-?z)+K?jp26+|d-Xwn2wSFHivh4&5hL46uq_WVgXr9VK zM-juS?Ox5QlpuDFabct5guhi-BK#@rFA>>Bo_E?0kT(@m*pr@3%2@?2r_~4U{i3uE zU}BDW6m}k0tNVh&Z8i+H$n{1jAPMbsWFL_=pX%o~ddHE?Yjq?2~LznzkFD+eqW>tx4t z;^zhxldoQ)zHdO}#{3mTDKyIX*P%VXmPLi#AqDFj(|2bPY;v;@;$zNE{P@vqK$dtdnEXMzIn!*ON}1WH`n zj~0+8-0gOlPL06+y1}?q8L6n)of|s_dM4W<1D4jr5+&{~*RV zPK8=*xJy>pQ9bS*CI8g!-uiP|8bCHtaN_Uni>wRJ;D>Z&d$eiHg=Mgy75a&%4qBtE zI7F#(AHPryAgm+T#bxR6r6ZuL3D^le2nwi!QYh>yU!#}ZNhX-@s<%YArgbg3L}5ME zhk3eJ-~O*DG-zdxV#{k|>>oMQb5 zxwuD*7FrEo={eKsc6~>Wo^5h1;SPuMRvsy3Nj`c#E6&WiOoo;}+c!9nfK59RY3+O17M7L+HH1bXdf~%BM^Ygq43z+k#jJ8I`eix`Dp_ zME_Ml<7;KrCJzib-5GccNJ6}&mpa&OAT0S47HGyP8u}z~`_tRCr|p8u;E95T&nb?$ zW-aN) zAAV_@nY)&7@_VzpMf9NH7D40v97+f!kvPXGmEV0PDoO~|JhgV@qWccU65k^#i5#5I zzIZGUkie>$rH+`y&!n>6F;O;o(6jGc*ml5N5!C`7&=&*&s}Ptv%CIjxK4QBzoV#N@ zSKK%ZK`vi=Q)6aT&VW6nRE|Dq9Mr*|G2y13 zdEC)oJON(-C{a_BW+Q^Km9ACGsk1z3tiUey{l%9UnS`q!J)?HH*8Oji;-kqKCG-7$ zN2uYj#Y?kYqFN6`UW3zK?jg`+;7%tTpHSn4{K6-t%U5Qz3f1)#3Uw8uzTe40IrRf) z>wD;Simv=U>$n1ysH@*XAMZ#hl2e(I!AvtQ!TadBI0G)Ne9kV}@TKtVTlwgOLcZe_ zbq93OH5)}W6iv3eBV!%d4XBY!X}=ZXBa`W)Xx!;2-} z#cLt|1I<-jJo%*rl*7Ke;>&r-4^5Vg74XkiqfZZplWenz9qcyRf~%Uy2>r!>oOau8 zitNc_9oi>jrx{ED6)J=wL+c>JjZ6njN*+f%Sc%?{J{&lagB@-Cuv}hL*+uk4x#B3k zRD^!7tZa(7od&nU(i$x7HbuFWg*x;26 zlV_$y!+c9@Ln&$r2*abac;I>Sj2~$7A(n*Uw;yu)b!<$Or4S=7eaETvLWy+sRcE4^ zdZJ);@w%QO3ERlQ@p&mL?p=r|w9TjYZoVW1scRf^5mz1h7V-qmWE;7Q3ZE0hdAtP( 
zKT2%6-(%<07dvs`_Y3J1x!wA{y!z+~c&0vf)l^RREBv_ zcAr_Mw%PE0ERHwf<>P$uq7zXZw&=ybjw%jpBR}G}^hs{xije;)`VozEu>@=L=Sz05)3;WX9 z1@>U`u&2G$LPV<8vn5Pk7inS)@>ENx%n}_-RO{isoeJ%f=Q5!q9-W&M*z+j==kS_2 zHv3hZ2C%a|eA~;s09IH1Fm!J9lX7e0lU0VMOW*D#UKHc166`G={Sg$99*!Mshr(qU zMQ{i|M2b!)UG8qLX5-g711>q;A!mr)GvGLqV%_FdlMFgd3{_y;`DNtnHy*4+E;zSW zL2W-e=__r((?I>JLAP}6|H7yj&I-S<#qxY;5kL_l_~_}cpgJ!bEm=a!tW_!Z{tDHd zs;_)ihhTwP4Z|@Du7Y_L?)0Z%GZ1>(OWQ|$t6 zun64g*;EOZ-XAIiiu@o=67Jy|bZ4%PXson& zTSUTYv;U6zG#hPCRK%!5M???3vCZH(NQ@$Wwsr*_xHN5DC7O2+XmFjXyjJRC_ed^u z#9PGP$@Hl|Z~XQ5nx5Q$QyjH#u!^neP?1(Ry`gq3o=J9rF2{DTOSSM^ucwlzi0NCt zYL2&m8|h@-o@q(mO8f@Z;&S&+HA~ntQHjOBKPnPT2KvaWJzNB~j%5t4J&X~ST#7rM>h+*~l8WL?CrElvhoV^hj>2=g5 z48HUH#!$~K9tLqYy%nSTR6K6A5mugB-xAA2$tw3dv0+S+wW7t}Fa`s-Jdu57FoTOF z7StkPM635odZ`e1gd{Oey9s`Z*B2&epqFV|H&My}fHLKC;(c71Vg-%aU9;^@`!{o{ zy&@MnnUbsJ{OHkMRe$W(%?(VR`zUM`h|bkTkAg;v$@n@t62K_cFDMUU%44pO6fOeI z1Z&!w5VkL+6U_1;V09j+M%t;F_t2eF)G)`hzne)0mtmLm3S}NH={@FLoZQ_HGpX%< zoBDpN`&6|=?LmhdH!y5(=)LKqJ_?0W)Dp?fxA~1Ov%i9|*MhiJL?rg2+`zCu+Im*j zk-nRYB=~4lKa1yPLw)@c2A`;1f59v#UFszWVc|{+$0yqjX0?4C8$Nn)!rtxKxbqC- z5jd}P1gkqdB4^n>SYGD)jQhK~aQz_2WJIwkQomnV1hU+~l zz1!ZuJ|(SJx8DWr3Wf2)J9sct{~{G>DoRR2^mRDJQP42$IuTA5$DJ2?4j}+U4E;oO zKYi*OK{Rz0kP?jo$?XKcy!Fu(#^xu+!c8&cxl;HCjEkp2uNlRql9Ls3o*#`f`7sM5 zE|4#48Gjw50I3d19{9+DH@Xpl{FUu#+!NIpau#$9xTBMt6btrDU*ja^OS}3w2~!jq zv&c&=8XqQY%rvMmy@_v916hizCPgIqM6`pLp40wmJ*HK4+&%km;4w48&3fjJ3Fb`*j8FHP0#g2!#s7?E-Br0iXQ= zVkSlb*ibxlJKzTL^6@*1=i5n_tz+u6kt9znFQb(aiBj0mu@ugNiG^aQha$$l(RUSC zX(^;o5R|nS8xJOpNlTT#ce3&Pn6kg|`pKv=P8WtJ8O=REVILOlCDd^?z)7`eF8P9y zNR7_cYUJfD`-yJ2gd(%@05VhY`Zeydmv4W3R*=0+NYMA-a-e`!n;{0i_V%G=s&|VlH^j%t!bTY>Qhcm@Ew8o*F%xud`a8rhygTuC?$Pe@@Z|7SxvBjl zOwU9RQJ9Vu;Fp3wadv2DN_gge_xt8f++`^T3p)CI_UC9g437i`W$7*JTqf~U5D$LW z&_jd9r(mwkr2*|nM)T?&tYHY$tZU7Iny`hXJtloP^E!A`9cv|Lz)Aod+H|mH<@?+LS74?`qGZu092* z(K^&~DD}o#&n^xwe(5d_JfM%l6IVO{?q6^>M>=efjZ!?JrkVH`zzXAikx5P{W@Upq-6p=FDmuLUU6FrtHa%vvd%?v5r+&& zh9AxOW_$rG|8lXQfGfDuGN8|jez>l#&Sj2S+~pk!6Vq69oK?qitQag8PQk499k=#< 
zn}ubR5DH+q8;QIYC+5(+m0yc>r0HADqVbqg3$hzzNb2$AFDza!rI~hT=yT}17`{mS z#F+Gihxmbwi_(rNq|4DaMp0M~=}oqJsTAi+(Ejg&-X5TQh-gFrU;{^UDT~SYZJ74z zXKlcj8aidf7CC0KN@*D*cY*_YJ)D z8Sl>|f=W}>Ru-pMtb1)ms39Pl{+uHls4wJ!5&>;j%h&wzNh8F(%CStw?^7AX)Y$&tpiZNEq zZYz3WOn>d_P((d{yu)*l9dT}+Z78XF;{T|67xj!7_Ljx-qOPC39S?M})H@7cic-Nv zBLx)Ixu@pX*cizyIyg3{sFXIQqY^~|aBF|^aA%_+*07xqYe5kac26#Uffh!AEdKX# zP0rMv)iUj)pLCiH!-S*W0xD4+?CKd zRbgp8m$p}Z=crf1^~i^b2zIG!tY5wxTcVl9FzuQ`M=Vx)vlJq{@o`}&YECY$vZeq@ z3z9{LX;P@216({JX%c8Mkgisqq9@8%VEv&T_uJHW}VhW8cRsPyyHX$S-Br6vj3g(JvS*@D{#eVv5<-A_Cvyan zjOCv>QB2Cim=tJJvszvsfhV*%EQ+UAH&1>o2o`ziH}0w3PXf22_&ND2ch;EMU3xEH z&tU%yf;^C4txE*y>&KPQ*!p}ZwrYYWgR6rr*-G96T6xs7l6oF0@Z$=8q}Y=!qQ&fK zv+;5N+_h~1Qc~9F%c7okE_c7`BlV8s2q0Y{yQ?($4xf$WD#@TPpKu6cT)o8Sw$n|Hl#UP~?rs_t;;P zy!|(xxQCru-K5z^3`9+$6rm$@Oqp65c%WdikE-MOi(miOdYH71A2e^=e1iS}o0PBo zdfxb#l^cMqs|@%*CNUfZ;^CEhl!Ee=%JKupZoa=2Hj@9}M%+goZ{U7!e5Qh>c47&^ zpf%RVLCYk($4Pq+unxZ_wWfmJ>KzRVmSFb(FA5|S!GkD#{rc6;$Q9IJ1S_RGG}16x zEPdbMN>iv!rUt{*wQNk~(Qa-ttmYIn>(QTn?nohyXoUiQsNdkIQwBQcOm8TG($F)b zD2cLqOwd{Gkx8S4={)F@w#OKNU}d`%Dvbcu^zbV?nlylihR_}lq})^eLE$Xtu9vio zCdwc;nrdj*PzE-HNdt>MlakMwdOcP8Hwzzk*pfqYN&cJIHL~VJU>77}{oBQ$8jav? 
zye^3cAPT-ymG3q?%B;nG!z~jVN3K}gzuvqgPxZm+Ycnu9r%rB8lv$0=!+TzsIu7X< z^YwXEWP``I=^0_HiGJzqzx|B}#vL>A+8t8{57JRbHdtqmzD5ExqkQdE(^O|Hrp>vo z!2x?Ta)RTco2k&+)Oo7kFr4axtVA%0c!BCWP+*M^mUOKOrUen6yS>verXnXVb3{+& z&V!{qH+O<1tcw*~>!Hog<32VkspP>IsBeO*-F=nD^e4VTdjI$2$dvD`@ql>Ui^+@a zrx$!jbN+c35)nIT*t|QPf16(UT_lOB+&lw@O<0M*{REM(>J?PDS(nRtwc+9W+H+y| zDfU=1hbP{Iu!5E=15T^-k74Q6NhN$k-k`V?=2}?W9os{DXa$ExfE1VDrJbSQ#!#DU zM6Qa>4NlrgW+KHywG4NLMj7s0ODx=Lcpe{<-f=1$1d3xWP@7eDO0*{8;{mY zD`|YncB_QgCiM7w;Qm@(*6{GC=CVi!eejM~v^nRfc+#ib_{?d#Fe+U*1kW6IDxuIy)b%x+^V zn(S6zryOl5&B`9rlp$DE#XT)eUf!KOSpw9J96J!lxcj;XeB zpv>Oz%}$X999<2OQaeA~*Ba|IQJ|S6`Z#W_P`%vXK0f9}02nR}A%&Dc%sc6g!0-kg z3+`RTI=BXPlg6rR!%r<=E9&wb5pne-WVcK=2Rgs~KTnw6dhO5QRx+8Z$xs3AA5M<5 zN={C0jlNXwu|`!_=h==lyJc@>XTTD^PD*{f=F#frGi^F`w{~7iv>W2Vl+eo_r#QcS znkkTaRzo&8j);CK|3E_f5wYhq{JQ4X&>&UW$^*+154ly}caUq651;RM?++!CaHF2_ zvxyMm;td5`QldA;%LrkI&U{*Cn#cx2?@_u~kY@qfsMMl&ynE+Pn>K87VfV|rpt_%* zaoMqPwZ2j0g81D#ah)dRhE>*U2lF?&cNXz4uW{fqQ8yOJ{?gmtHMAzx8t>tKGH$gcP+)9c11XJbWLv{J!`mHei)ikJ@PnVFfD)M(U~ z73)mui7Y@H`Ka@el(7`v@(7c)&J=OF8%lZQv`Mjc3$tS)qS7yL{HGv2*} zY|ynZ8yfS1cJ;o3CaEVq|E;d_Enlcck$8`UOZeu{C@eGc7_0qXN6J6>r~jM4VPIfT z1b&YIs=JWLrm_TNNR|seR4=>}Crb~Xli?XT<6U}ol3yYGPrH6{=NYwu0^t&9 z-A=|QqyGr8M0 zs=T`OV9H{-^JsoA=uAadRLxht}lDj^`uW$-_n!&ttAy6*ah@Tl>W_{m zlfAE7O^Q^9;5nncXrn7klhv8u^W^R^BMa|z!vPabGaM1Sz2B?!LKgQlg9R4D1ryntV&V~PH!CCd*X&$>Y}<%y zw9jA}Ctb<|^!n?+uId-4_zuqJ-3;cbOrDn3&~Np(n)&m+oN_a9H$b9nqOcHt1#lKM%+0fcwt5SZ0&=HI zm#%pu&~Q$66dot7AUQ4`EN=Lkh@(2VI7wOLEvssDbahQG2KqJc#A^Z5k){V;$lc+F z)-W!bUUt+>*#FI7C}K*>ZH^O9)d|65Vl4;Dm?&stgxQBf<>9va?Q4%6kFNmEM9mc0 zXRf1%ksD8aJKuP!r$ziqoAl58Hb%`V9#Grxeo{A?W7{+BXM?}y=Ar;sn||e+FQ6^> zml)lGnft`+(3%qmvgQ4l!pEl#IlOuFCKBmGXKNZtLDd_ z>u+xzCI_>xTO!*_!e5>EJ-vncP-2)$WwUJrSud+jYeOV+u(8T?%x@$-IJN4N`{n(6l&uVu!rbON%mOb<`>t_Fsl1Oc*SGhIVa#eYdragz@HnOAEnUes6j0jzPNyCfX? 
z9@o=NFVMLAbrNxd?9K<~1GBkr-_(nkeW~@>85_=@FmW||ynbaCXjt!;lTK{qQhs^t z$%S>5EbL&*rKI=HBz>2168=w_GSt_;6p7)%?L)1h!j&8CySghwi_R9DzVjZ=r$@|` zMmO#p-ZkZ@_8nHp=7^dtGn(}7GZmP$?;WPY@VhkR zLI#=zg!hv@lG8E+?>61++}A$_GypE{;_LiO7CrwLd+!z3WYD$`5_$)zDhkp&p(8CI zh@hZU=|u!VDWcK>ger)LG?gX-B1rE@51~qjfOKi0Nr%uw5q9GHe!KrI2Ya+fd%(%_ z3&}Gx_uO-r>$+y45UL}l{QZ9cc+qNc5!$G}+L;%}Z{Q7}(O_OdR-(ud?Wmfx{JT=a zkG>}7(Z7@LdenbUz9%FI=KA4J3e}YfEAPsdLw)Kf^=jS9hwD(u*OJdu7E}A<-UP0G z^Qvoe5V)GPHam99=fa?IoI5IgO^jJ#gubS^|M^WZzcjPF7PUDNrP`b?}iHw>tm2)qt`;UVm($@T^aX-gVwil$RPD~`wN=%L|ANMVouAu!lVa| zniK&^O2XLAzjpSc?**-wR%aUOZ)<3Dfm$7}ipuP|=h%!#fzoMu1f&O498D!>=t(I* z$%;*aw!N&YS=r+-CMKrKuz#nG{eMpzQC7@kl91nd<-MY5ciniuk+{!t(utWIE}lfF zYx4X3e0OKzYHPEnidg?u5}a5*zvrDQgS0X!=jg5o=u!7-I$gmxf=`}qu0{P_L!7DX z!MeA7{=7)l#GJZoMz~?fuuFeV3!QgsXc??H+Y3_3^_RrbN3U@0gMgWeFx~Uy>DL4i ztFy}Y7ek^Syz-x>z7iJ{6SKDV_s+Jw`ADy+nh{5s@Ln!7n9w2SpBK6EKVIY-PUz>o z(f%PYkt0smua6G>l*`X;67oY|i6~~_Cz&{Q>Ps;nI!Y8{m@WNgzbP+zYGcjMaE0@r z!!uUOIi{<*S~kDZV}$g(0F5r<;SXb4+qAo`1;TF$#UHL2Yj2kOAo|7R73@;7&MFJq zz8ih=hvFz#-zTVVKR0IUkv_37$qS0@m#Tebzs4dD^G=iw$j-U_@U1atD)3`5U-;up zg>l~!>x`oe*9i{(NT+QSB5v*0D#n=ScZ4QN;A1aFzq9{6P<+VtKWN;#^d{C%E=WP@ z|A{l3{~KqTvp1396ixG--W@qV;Dw56Mh76RoM>IS9*Px~Abkijgn(-ciyq}YkONt^k>d)j2?8O@FqHIUu*+@5 zfot~p$g{JLyUl)XB$f>p&l@bxcX5q85)w5P+7y74+|Sl>8zS|e!1euqx9<`Jkt>e$ z=5L&alFd$;S_W9Hp6#uT{K1 zbvJy}_{PQKZ>l$Zo_zIvK2yQyvh6gxL91q6Zj|{0F`;MN zUc^Om#jhVp#lIJAbdz2n!j%#Uj&k@G2qP13wlt@tu8FAf_53F#%a*$8Le7_H$cGXd z;&0e8fjbuc_FSs7Xz#qwy&Y@r`Qdfdnkky3xzDkv>$KB1w63|(K2s~H(rhE*k&wdI~1%_mGAOa zd4<%qA7DKZpkhZQxyO@z2MMBDlNr^}sFs4aQp1@A?Ri)ORi6GfqWAM(R9&`0?wh=d zcDT2-qZ3D{&(?`?FDw`$E2$r)@mWmm)<4oPiT>D$o0W^i8kkJuW;T8pEq|4m`tehn z19KI&Oif#O{qz3AdV*jJvIc1}!rz9(Lnc?Ua*prd7Qkk*XndA- z6)gJgCEn{om~LwznJKRK71{nTjfwbwPGhn*`#7-^C<-$OnILwJO1B3j{rZ;NM?1A= z9lZD?nQUbI{2};ABhCH1VCEpY@UPs33>41mTnq3w34*D?VQHEFfa$ijg&;GsJFtIl zbc2b60CDw%Aix(vxPN}!2EX!RVuUwH{8Kr+L;_?3?*}C|UX>AI!%|lYOI`mZa4th; zu8#$R(R?%-QmuC&@9&Nc*I8+T-h9lL2a3F)@DLV7>y-ySASU`mLXiT&;!OY9PVis3 
zP}@_B?6Nw>U6s3>GTK1Z?|4d>BqrUtCMQ$Kb)g$j)2HALpCA0wwR83S_a6mE`t=ha zVzY=GoB?6%bWga#P`zVc0zaPPFXG1hAHK%@e|fi5*{OW_Q^Iual=%4aTEZ=BnD0^H z7yK7 zloVbdEf~Ehwo)@sfrxfCFz)1}tl1u4jdhYQz6!|jGS7~U-Zq)Q6e^eiO&z4_6BWzm zV@|;1PrgaeheYeHAK?pQlbxAiK!}Rv3@AXUgA6iDEsXNl*IPuZqeUqI3suwW{6ret z=d^2(JlRW_8+3`1dQu)W>{3XM0FBiT%)-W<03x{vI)WdPk_12~6_0O~rSEF_`1pKC zNwHwpAhB#;-4JNj^~qjAN(=VF9;RuO7XoV%uX@MSW<>k?bAl_gJnZk-5gzu;uI!5o zRKINV>|b}PaHua;(FBjC$LrRC7FW8RKY+e2tre6c05o58toK0A>l2@l ze+;(PB-=`CLlP>$G|}Lkv&jEF)M`4aSiB<#&+7k2IpOUH@RWEHfHX@0#61ev-)H<`V(JP4-b)=hW<@-S48 zB99xnDfsdL%W8|Y;Ua#)s1y=}ikkocK6Rw&vHXX)@?`Kc>GE+Kqi&}ZT8Yu47)Zg? zDE-|Zx;4AKdWSGQxXtUkzvj4Jm-I|27_D=V zYEkvCyt@Mnlko!#h{LPxJ^*Rh)dOu5!Aof=0fjAgXT$2}R>=UonBfdwk!Aa)bcqgA zDG@;I%;4!FKqQe@`tAGopGc@!w9;YzJW^7Iu;WF>rN2i^T+!J6qZvOmR>mJcbIH^? z4(4i*3cgPdoJs#LjR5PS8jx#`wDl<48YU=i)u@xwUK2>_Yyik|r@pjm>Bnv4$pG$W zTgi*P!pAFB9!Ka!fQsYR>;|ZUe*FUg@kj*9f@YMIl=zxTX+P_L_q6Ajj|&9(^s|yr zc}9^sn1(CAGm?@u@g;7BB6*W@aUS-5l$HgU&>p%*KW&unhEAZkRe7>y?20wl2(zCPBvDBL^ zMR_(qKd)3p=(T-zvR*=?Tpo;16DMM;zt@6#zN=sYT_(lqTqnI{Z;JHJ)|Vg@Hop)4&+xM~VmG?7p4i zm=tl_5OgV8H47B|zOj^ez9Tdyg9WBt+5BDkF+Jequ8>jg*Z%!|5|2ADsR1(l7w0HZ z6u%C^>UWNB^WC2$UfzUys$ zjvvB3KZmB%BcN-TtA_k{|^X=>bigoQ3f_-Lz%Q1oliNH97vTsQos+MO?}K7gl@ zkS{Pws)B7I`fk0vO|p-TH+si-FVK&+gz7fl5QE2`hC9o{HV5t|d8d=J#wSGu{mYO+BBfm%6h-?kB~_-Ft$rn-A2~LJG|*yGySh zfjzc$Q@W^^sZ;@L>o~jDuc(_^v7A<3QNb%Kd-w&tWA^;{mVCOMqfMy)V-wv>20UTP z$mTI@1~?(L)gQ8)|7&Y6<`6ARZ}w8$CO|O_8jfbU=Y`U;{G^pidFfaAolhRx(H~a6 z4PBzv#mjx({m2*grDV|22{7TFGdMg_Cn=;w#12nRqy~5UPr6oj0XPlYiYu&yA0+YA zn|i&ynRrxG>gcrx#Hjs!kBGN#W82zp=g1_wuKmL+9ItS3xb`xi_0A>;2f^GZO1(Aq zF4vY>4aUSh11;TGK&JDav9a;MgfXC>(64!}eorJ#4L^N(eq%}8J;Goo=63B!QAZ+{(;wh(L!08V} zWzjupoPQ@A(%(81MGSKf-o_=XO(4W=@OXjIF8w!EqEZ#n_)!#P21z7*V#XFteuO$7 z{pqP|APJfETuU;L=F>cgxb(MPLgv!(|H3ztVjsK#_(n2WZ{1MGclz?)E46pfw~8c1 z;63Vch7__|R`a}4#G)$u{Fcx4_1kyr&EpJ89%u2~jKlMJaRNBn_ssJitue{d7C z;MHU}6FK@sF?|D=e}={LqHFI-QG(j;_G`b~TbCO~eE-9B+OOpqb!`OB``0%Os)OCD 
z14rOc=$vWrdxFExy@8+Nku7@-0LL`ybeqcj-)h%-ym{?Nxwv&~Cb?Yw4MNe$HY9Ic z?97e+rD1$Tp$mfP%Q}THLQE9xoU8&+2ql9=i~{IXyz|Y+)2zt!p8}6)#IBC;1}?6r`8657 zogq{r^f13OjMZ8IsG)rVL`5ntJ<#(V{_@SHF~FcPz`x!zG8T9p1OqO>tuFm=xhE>! zzH{aWVO}y5C))ZNwA!2EtglZ5!*_+6bil%`fftWk zx~AZfJ4_o6@k1*g3EY&pdZ<^E3Sc=(@B_XFUj#sCbb)vtVwhjbbq}q$jWh(kl>($c z`QASN9OP}L1pP_SFzBcFEXHZtgG20m%6d)arW8#*bpF0|E(POW;NNwdZo@1?fV8m( zAgeW+jFM25YdCI*TW-ldh=?d56)A?WP z3BXQu6!+myqZ|+3Ug9>I1xUJjU)$IBi4#RFvbV^T)?N~Jwno|#OoNb?*Dza0e=iG2 z1?NUJ68wi3rm3k}fEdt}aL^}hx4+4YZ>HX+uX6@)^(<#6g>SdJppX`I^E4owV;#l{fslBxkxNz&eCVkEUlDTpe!>+oD z@5SFE?aB@zRuEeMaDZo?3~F-ZEx*Z`R@@1{!tSgj_ujfchLs5*ho_vPxlY zCyH<0V-M$eX^}Q7CP1n?76hf;AkoOvxh((6zLt<9O~Q1D(hn2dG^@9v#%|5>_*dj|gC7_6c(P!_$0~PW=>H(MnT!6H+}8SFGl_r2DtUdRAg5j62Cehi zzBbMKF*NK{r;odAi_Q+hM*jFBWE*w*z=x>p5(YpMjh+g~;>Fz}FeTl@^j&7)n+1FP%Pz}&-_@CXWgO;pzExQ> z?ZzDxTq-<#KbVNjsMezT`NQI-mae|_@$|O`1KVXC?1pr&l)mD*OnAZOa#l|E zTq=+2;*Ww(h+g)|j0rs2_qcvOP(#R};WA-x5VorY3`rW*hFmThr{Zq?GvSi!%>HulO{eK*xpvtjx0tBJGzf(kd0Y4v+*UtlfxYq z(rA*}e0Q-Kv-PkJfMm_-CWW74Tj`wCe*itz0!eJ(;_F09EF9Cxtz#;KJ+^r6^Pbn{ z3Gf=y^)q)8Io5CJ9=pOrSphR-o?mT>MaOyT{h4a&fD2Y`(5n@#UrH$?>nxZfe_esdyZ{a z{|zYLqi4=i8tTU(zPh5YG0}_RupL;&_B*;?@i}zPC$RcK`AN)g2#!FLfQTUO@J*HT z*n|6*u^0=WOnO2TLw0>MfFOz9Hz@Qj0lT#VF(GykG|fpYJKce*_?iN;fmDYJF*tO^ z@g(ncgc#o_-1PBoF#TIR27A1MjP9!1d%Ohu0V#Ew>Zh0_-zHjBD*CsTU&u#(s;z5s z_#63%Kpf%gZ#a}{fG2iszRjNTElBFi;M)jRr7z09`+5=wbjtg&zNSRK2ug7}IZYD%3v8iy!mba%*d`8S>(2Dr zbJj^ki|3L+eGG0iYMSEyF;%2F9a5*g$L2_af{v}7{yh6JJF)If2J6s;i?`M- z*V>fNuExQ}J8A70*wt>bNn>pbZN);$u}5rnw9v0RM{GVB)HG@E2_iQev*K-;I^D~0 z>1GR3g~4?7FJ~0^4T~l$E^^*kx88(){vd`0+N#)-@3*@?Tr|cPld8f`QEW?BST4;> z%oRyF-Je}7V5FXI7cS2ZKV-zuNnsrV0xtzfHYUD>h?0>CX*N^-B$ALFKIEaJ zh&02}&fAj2Fd~hE1VWFRsL0Uq50aE_*weaw_rE?tkedQBb)9rr1Kf%N`5KDEhMWN% z;)h;yQ8K$hW7JFY8IctO*alKV)!!h#Eu2Z<-XLUGE8J;&P?*$<34RwY{)qlykp695 ztMM6!1a5#lHe%pDe8HoIhsv#HNBppAf#sZ_Nt--|?FP#eJIPSFqhiifRPj(#_~mmD zrCQ#)#uCU4L6yxgn5#mO@tPrb$mMvkB#N*48e*DEWSf>t9ML1r%V_1RhlFNhIW|hQ 
zjdR{bwWKg4eG3o$jO^cX8fbQ@l@iixCTJU{zs=yN#NX)-B zExXnJi7w}ITZvis5|+YOR;uKyL{*qD>lZGAJ7nWYBq_x!ia z&)Ho-y~tmI!iVU~LVf7epqE|}H?`gj&0&kJw(nx7Crrig7vRQC<5rW8G`oox|?%={p)B-WYaU&pPR|b zXPu1ke9dBlX8-yi{MFin9NU2*?~|)8i;{}A3!j}{9Nmlq-EhMyb@qLMDZ z80nOPcD!g51-TY4(P_IB&!u4ho8>vdP<~e#&uy3*T*(N`%FLAbEZ3bvPZGv+(a=MrYF3`Q$ZS zEq*==_+U7zokA8kE=o~!Xn!|$SYVHgp9iQ%{Kva*;5-gGYqv{R(Sy`mvAOHtITxOj*ozz{Z(;BT=8tH zHrdVBiD(%}Av+ucA^AZ5q-|1~CrE(r=6D|^XYy7`%A;~3^tk`gbdFMzX?RYsZuomH zIsyS?Adq;PZ;p(JJIvy=3A)+)6Rz=$1)u_z~t=s=Rx)BvIN!q&^RpWXP|tF&roj(S)Sx z)%_{ouSUAJh{?415H_I47Q07VNp_o-0t;2V!(c~#9yL#A6V64mJIw8Z7?miAkf4!2n>m7btjpj zKJ#z7IItajJKRJCbHndohZt#)2s?~TuI#?Pp6VZ#H+D|8iE5n06{lq{|?*_VozU6I*I-b5Ol!!^&A}8j)DMvD%h$M!XTlu2%Y8 zG$`HL+T8k!3wzs`lLvcw%^pK)q%2Cy=s*^PLO<@P$iMFOYdj3j~vALn-od^c#e zu8Ep>iM_KN=sI9O@(fn5Pe6{EY`3@*F=_^}anaM~)0V0yDcrap?lC`3R5+*cl;3(_ z&WyCQ)tvyEP2Wu;-s(CZ!t6Akz%_(Bfri;r4mP6}a& z@q?B*OH1(WIDU(W;FWL^8B7{~yxq_y=2ZIio4|eFxafP^uHF|H=)#IG4PWZ3 z#g3-xXMNKAgBk$W3^6O8c>Luk)w}%RO_rb+t&f@+HCA}b7FXE^dO{X}vcD&VVaVL2 zsql?N0xKjTj#%zKBPoT^APb2&QI-IY8N^F#i^0qKy0v0U2Kf?C92E)1&W4NNr^PN6 z6glvhJ!sK8OPi0n)v;kg>i6Lh-C>Fb(_AT!_D)zuf$5h)w=Hx_)L%cQukTTqRU$4Q z_n5?3x#OL+x!S#DETozzvw5#m?NU+Dos(M}$6t&FRV6s>%F=$h^rhV16nLi173Oq~ z6obr`W~gsOg1!Rr0uG_(Q*0xZhu>mJ58?!Fjprs8CQ zjUioPj#2r2WM8?Ln2pd!krYou26QKvc{9%lcbdJCc+cn#}mbvT7{yZK#;Wbpa-6t%r zQ)o-&bWa8x!mI!G@)qyMSQ0~DaUI}zRT4|+%RRFx5wj|=<^aUAck!Yn)GF^t8eNDu zI91`H2u?G~8EK@R%#&X_e}qFx2s?;kr#~cl+hDtwy7xZmH13trmQoPB5+Sk8ho6$) z=S51hYBVJ#uX)nou)C=*EtjsieKN&rd*8b9L#;482JJ+MV&8mMaJWr!#Ngvb>_^xd z?W#qFlS~n3JDYrv@)DwEX{=qV5q0h7Re|cK zf4}0$s+L=c;)JjgeA`P@(dgHBODW}KNkq|Sx1waC={oPsoyi~O>^O|G&ZwT!XSz(O zy8;dMc9p|Xj4MxDeC(7ZGsMQBdrStSE(*RvXt^0WP4a}VNwb{gE7U7k5YyxGTyxb4 zV&hd9x3#w#3>|&VO6s})iCDyTLeY7bAbWj|`x&L5nGc2aSbt+re_{7m2jUJ)9Hkwu<`0Vc$H{0KFaGnvhH=2Pfj`?G6M)(l|>`!m&}?4Ms|cTie#+KNT04up?EK za#p9c=6cWTE$ax5(UEoC(wezs ze6#T<>*4l}`ot%8m}C)2t1!ia(&mpLOG(DVq$dXBZY`t%EY6|cCu)}klb4JrO&~rv 
z$qHAieeA*0`j>nSmOeP7H?%k9#($=D%~Dh#GwYV@2}rR>)W6%s=#ndb;OuQ>^&_dM_d<=Jl8vRUSFcAeHF$pP!07 z3ROkl-eK12-zf6dhoj{#p(7JFW|U_iSk1P|ZI|slG|t;pMqGnEIIMoeX~DDHO>d$@F4-Qv|yeEF~My9@~odQ*U zznzwDVnlM^rht_wrYBnF7YPbO6+d38{CGL_3pBE)x3v%*SKsX+TGs~2lo0H=PGpZ4HEQqf%$+|L z4BT@Ri@sk-Z(GmNEQTRacj|e6!8Z2r7WS(((Iw%5TQ1!1_{06R`APkR{`-mF8E-@? za|KJSkasQMLiz50&f5D@_XBh7NOkgDi*T0yXum_KO?IpGdPw6@Ub}ojezso34+Zm9 zjW6z++4{6>5BQ9@Cy&s}hhTJOCB=~Rqpc+&ZT?X%ZT<;`zY^3twEq1p*h}|fG;f&R z+zJYtFRtHm0@wO^EU%En@mc7WbQupUH`Dk;lQyH{?zMkq8M?1oRFA_S&FJauy&rOy4 zcMqD##8HR8oyF?CK|y%B#~a0ptVw7si%gdc?7(#h1fnd~D z-@|V;2jgz63_oI(LMv8pQi++~BzmVF;uFbitc2YmSwpiCvq$7PN_eziadat%N}=q{ z;CmWrI=St9>!$2eTEJ7rtubKdrZZjE+;KJhWX0PP?&<=9O{@vIVRdCQp*j^FADBYi zRphhZXmHIo^Bh0U=$H(gIb?j${!(m}iRCQ;hwVZ0cDcuIpJw>FIr68!!NJNBE zP?-Yh`e|WW-cuT6+|K6;*9GRP%u=f`#{_PJAf@Q%^I;w@Qt3;q^t8m$J8`{FO z-z+igbDx>cx3pbY9&FF;r4xmo>1mNOid-vF%%QW&ZCp>JVRlIqeq_u_j+g2;+#Q;#-9?swHjmaX4HVo6(M!1aW8 zmzg7@*t7jFuMG5ux(?NR_$i1g!!M0m)f>Yy<`wUz0SBgUGMGfc7TfsZsBuPQ?5zC zU4Xt@eRU|Zl3Tt{U-SCX+Kxl4+~zgkFRSOBdcUOAcnfH|phmE|$S$#y{u2shoPZ(Ar5XzE* zOwyL?=Dt$@r!G1Os>ss0e@yK=fnKvKi+J2~HEjw*=r<2NQlm$==d@Fq# z0psO#J`(DZlj`-#`T8i8QsntcJ|KL!5F>c}DGFGLVQDfgGIz<^*NSU)kv63r_O>|F5-*MYiE$hF?1r}w|P zJk%jqWq4u)!yQ{1Kw#-VzGv?^u;ENiX8<4H3s{#TIXBS?xGi!;$q9ZDbXkpNJX8r# zxD^k_?WQ59=9Z9!pR6_>LDH`lwa9#%-H#Afk0<3hqI*+!WWV@gxOrNjA@FsLJBBUt z!8tD%+9yCwYXe12p|Z5C%5KwsO*Wc8);~l9?$DbwH#2(4_$t&Rqzc?Px;eiE(U`~8 zlIGgEoWU;VLvBjm+0y;RwgyQRnKCWhNhi2~V6NCVw@)gvIH(E_BtBj6hM~P@y}@~J z%!GY71|1^dE&53W3*-;v1AIoR&P!6=`Dz^u!D)?R4>3doIMgG~F7KXP{miMOYqQF* ztdG^_2s*-$bHy0e#|SxPaGTcF=_l5QO5TX&m*;%5GBY4z=KHO<+sU135_clP*Ac_9 zjeNWO#XY(#_h)2Nq)<>S(7doM?%(a*pt!dMf{IrPy!r8~duBruz7bn}%Vk~hAHv$k ztIWts4<}r!r>E*VSa&a#R@quLEeEm2=_BE`ea-1*>@cq6mNk;#nC0s)7J0?N{$H=5 zL(8B!(Sl(zDEOT@_A27Uxt z*7St_r^;0#RTPa+Kj$VRaY}1V9c)jURz4X%csZ9SqBPzZ&c6kEzp~|-Coj;EVs)L* zYvxF+G2|?d);>EImw{GS$(@u~;y=lbX=Sce6MS14Tef~UMCQhKqLZgYL67ZZ!onzQ zz99M&p);}^qz9nzc6CPWhM4sVaZ@Mj9srn_mPRbEP7vfz6C?_E$S+XUn&0$m&j@SY%SuGcSXt#OxO;R(zO(Vw-HLd*<52 
zi1X+&Ovm=lN>sXG+9!QO64awiB-6raTpM*WkS^f3L~XB^jkUVZ1S!?RDMhNXq%xsi z!{x&r#n`cfusBN98M_$eKWbXFv!N7>`G-(Q$ZZ{Lp!$Y3Dl4?}^*ut=#$`b^wBzOd zuqPPm!<)%c#|GCqoyiZkh+ZJiFeH>{eW0K-Ma5$M?u8B!R2LwPR;}MN&UDqiP82-j zpc8K|Shk_w>)rVfz8;}BPa+q{zxK{?YbnvjYn`s?g8&b#bzKajJ@PGYtYoNZxx#Z) zSi-`@n8Lfv8ad26{UQuKzd9L+Te;(PNITN1DfLVxaqP}@+`yaaJ7|>)WKS*yj(`Ub zWH^=d1zMBW3Cd#d@}Dnk?AzkVs18hHan(k)zC=#;=RC64=`4{9Q>`5>WH)%FL$IXf zMmxL|w@K(Cb3~C&Kl&0q#jm}_@?u;7&g7*!|G4n$qnMwd8iE?%p+BjfFKg^# zWe22QuW6*ui^ysdXcdZ%Y6)%sj&5~vNiUrC`V_Co3XXPnR`?bj$83G*O9OHM6qxqD zKEBf;T!jsZ*zSQhX=c3%eCZ9kqxG@nfCB!z^Uh@p)A8Nw=5Ilsa=pM|;doy;B2k0_ zw?}f5T6op>FvJz&2j2-2wINWkCf}o;XI|!&2xFC2a#WI2Qb)r1L_X6$8`fWNu> zXI?6}{P1y9>(w{uM08365S}A`-?ywIpeXP)h!0w|ey?j}&nzc6rfcbF*L5Pd^N)?0 zau$^BV@bz!kIS!+SCg6QdH%1S`zCW%492)r$I&%)BvA+xbmHc;qv-Lyxy$TI^SwSq zKhw)d1c>0|56OisiF9^bVNIX!A(aux>G>$BxmaY}AMMfEm}(ouU83g%Hdt4=7okl68*;h4)ejK!RIM*!j6XH%pyrHyDf#RCS2j?{PwifLS=^Od3To7 zL|RYY4}OfcMo|9^U$9|%yCgUQ(V$Q}rXU_QIDF{#3Y}YZ=e@8oVc9Q?Y-RMegSS@f zUFNi7MP4>>Q;}UfVL?~z1jWcb34KEN4LOk}e>xoN{9MZ;o5)TsBjv#(FQJ8K1AH+3pJd4uv**8yS>_zL zV0W`=jE-M#gLg>T+gaASw3gV67xdH-qdDhN!`58sh7?TDj;Um@_sz8kG8I%yLQzp^ zYhB{+*P1d_RVlozCom_#EnzMbb>1ifj)kQt zF^N@Gs?!~~Aqn|TLIm2>ls+He!hNEGuCS1mk%jD5t+h}=J(@H-KhQnQziUB<2#AR& zp{m(=eH48j%kzeWhGd24e1)}1bA>2U@VpPkO!sX^jUU@`Nnx&oWO$cP8Scf*`5k8T z+CO?K{k{45d8k<_fqMP8y2V6jpTov@ug%0~JAD^TFtHDb5tVd^DacA@=q9>=0%t%o zOPxXk&X-eN=KGX4l-=Q(%f& zI6{td55G@*#&m5^1Uj2Qnn2nm3TO3$xL62mn^>t=?K4#88IZnNtPVE!T#=j(jhs|=EsBCJ zUZl8Ap$;~Q`Dwc&wl`r9UWgt%qtkjK!E79oarJ2n;d?!*b`!(qiq1-r^0j-HuumMG zL1wcdu)sudHyb6)!W7E>X!VIKm8@lyW(=zZe3qOe@^Al6$2>uht?g0X{_De?!yT*3 zr+<_0DIV*|fvOW?fG-{fZW+?o_Wbzph6y?+2>OV#MG@*_SE-mD^Q*))B6X}0m0IK4 za8)?~zMsZX-_L&LP!#pWgqkf(SP2J>Ru+IM>Bh&*PR%{@os} zbs2l<&q{?IgBQgVW~4yymUcOvfB6BZT-wopXeh2)yF{w}mg)B;u6uvbbq4tK)4Zwr zSum9P>~SzO)!#R{FC;2B#(l@AJV&*d<=>4{I=Tu^>`Oo@0O9!sbu zzw==3FV~(ulBhM9O%}&U_|7p3+h;0XxC!M(dR$wh{d9-3i{P)V3eBlAf(7+^^oGzT z704MvgP?Lvti&pT^onK*$EVqcYb4B%R3v7fg2q6zpjP11GSjk#2t~h|x9{9MJ|3@MGsGB;N3s&*|msg-W%^jeSa%5s+a0 
zGg0v*Vsi@B7KExZB1U+2{k~dYqS}-=VM&jr0QI?BbqZJh)#tL^j*Y<9(jDziNBvnl zX2LF^uSFQ?k|5$F!xbtb@GrslBS;tgC-tw9E`eB3@s;%N$x$FgA;%~{Tukw=}CgFCo$sKzUXY#xj0Q1VsqXbr*&Y|#46~XE0v-J z0pY(I6%Jp$xnY(1o)9b$-%vG}ZA=fp^OCNOJAQZXI!52l&mlxl6K}Fz`=aS<0OhXppsSIBUUeROtto+@89w$D6L9f@vRDm zenhR93)0H~3!P)RgbiEY{W-v?pVG3X8!P}Ua4Oap#^?eNQ&hkZ?_c%5{>U00O)N?bhh8aT7#`e9HoXn8c^nlY z^J)hepg~2P(*@W?x`wwX+rXv;5!}*o63n%{FGW!?UfalolE>)LI&G>;0c|SdsA1S= z=7GIr_#1K@gENzbouLZw3B#^g!o508InQo@_s;8wgn4A=C#`8i1aFXhD^ZQ9`Y!u& z=48eD2kntwlSz0~de;C6A~`q5naTDO4OA($ zf`{=_m~A5FM1^~m;R~5EY%(FZGfe*~8~7N469Sir|4;rq-ce6YJfITtlC4U;w^vz!`?-TZ;pec$@(qoOpo;vv=Y)GJ>EV z9Q1Q`Zv%(5PYsTbwEM%{U^RBt@eek6z|Tf;O{ha`c>2CQSc<@}(Up7PXH#-D!L%)g z;~z}k1wYHCQ63IF*=LQ^Zbtn4NPYVMe!>V~%;43tR5=C2?M|fGb`D;oDEnm>nU=L& zm3!`zs^DG8uIy)a#NJR2Ypd#G8%Jt5m(LVm?JXu$wK39e@I3%~S~_ow`Js`K5zj-z zcuC`bX6E#jRv7H?*7mUhU{|q(m03+726qK_XyiAXpR89sY7U9oDjzYnm;mr+M@@F0 ztm`k^E}5;9sMfw(>MpFzd9#)V=IQNkrOBr1fQE_BBM#1^pNlL^?3b3}iJ)S#Kt8YK z>ad*iz<|hITuiRhS^`t9k!n*;G8hd02PX^&pfiBf@Z5XqWq6TwgMw{EUy58cxT|3t zFDT#)G!|M;B80%U6wE^7k1Opgy=WMk1zTk0Tm^p_*Z29E>@$P7`{~y9;5Ksmf&gFr z;hHkDo&@I6JohlqZEIT#Z}G{;k(32CDw;%NuHRKzfLT7dbz6q>KQAB)prZ9l2^Jfu z(=BbYsPv1o<->55Gfuk|M(lTy2C=tzQydhG6U1IZ)Wgdra4&xzE|qenC{%U^HGZay z1zP!Gn@XiEz6WB}qE+I;Lho`_@q@etfuv`L<9D_yrM-GE+w+>{^TX=K@6*G0F5#G^ z`~BZz?!ZL3k3GsR_2nod;=Yh46WEv~PatbG4p;d#0Kle9)FR`mkU<-H<6uwB;ys{UA&3oNS{5~t35^4Gfu45@C*wry6XzV z{Wlk=mucRMhI-Fm@3q$jr~P)0K|dMrytNc9im_tYGC8-yLN34U>Bnp`LS-h<8SKuX z>ugC2ty1n()$jb4%>%6)r^fPQg@)C!giKQd}&htVZiW?Kf#ib6oesRBpPIDo4yeW;DC+ynGCy6e;vid}$k>WKb9DvZ(AmbGHS zyOY7ySXyk6`4idwp~rsfeUG{QwjENEH|uR`J=ctED;$_7R`fOQr1pLOEaPPF!0b>k zmPB>TqwP4mDO{-DLAlI%Hhb~Tb%$%dy^mJ8aLQ{o6S;epSumCic*or)t+j9X#lO)) ziNwuEzHt;QEVQa04SV$0e12d`e-1hUjM+`cEPIh!k)F%O*E2vI-mhctlW zK?j589gnD#Bh!Rk49nyf`S@hx-<1WWb}`Uun7@!q-&OZUo9fX*TXlD{n>!Ol7LIq` z*dl!ZqPPal_a0iFXd_g0MM`LrGi$FmJ>DXmmigX1!{5A2_BB(|KK%OOy}cd;>197o zExfd}WNc()zfSR8a#Zj8;Z6s~Bp|Ia9~H~Zz%#ADeIw5f?lmPQxYTJivHq2h6~aPX 
zIcNl{#;iq)73^YO^e;q1s{<6XSfb?*C%ODqf6`z&&l@tM#i|<)#k^IMatlpus~&5_ zRX(@+i-KuCEsjPI_%8R0zOwu3mf~c?bt(i|_dO6oEB9uJlYUrPLAHH<_$eIv(Jb5f zpHnix;90CYjK()3@$%WUSkscebd}j=jz%KVR!Zz;x*LpnqQEi4!gRBO)D##6KV_mp z;Zh^Am))nPjqm={i)g}(FCFYA&y{_%NfCxQOcTOT*2%>#w%=4*U%021~9E&euN3Iw;fBkoUo9kons6;FO zgOLo^7!H-0&5i}V1C2ZF~J zbg*vX^Y+wlo4G#F#zQ~Zwb4JU;n<<~1c`zVe36#d7}apY&RYS}%Gw^gES!BmJ6WR_EbV zMT>%$4rsP3>;!i1p;;|r*qN*L`tvFc>$%@suY+yypHKpv#vcjsOo3GY!-)*506^E8 zb~Ma+P$}>JXB=El_?g#C?bHLsqAApIcBgef?Jq^r z6{RD~vM6>dmocmAnXQIu1{^6ZxC^0XDS_FmUw-L1^l_SO z#o-YuY$&Q!Ja{5oljam%AB|FDrr0O_cj-OLv0B^VLI~ z=hl>b5hv9@djz4I3K{5rHLX@;{5&EK`OM@5BopV!%e$UVDm4oJM%9l{76WB zy@K8Ux*M&M<%V&EGXvP3W2HNe{s{kEZ6pn!?5E*NHgqN=iIu^AyKqI%nM?IC8{^rE zdHm&MO^%IMls_CpS#Ngv?yOfk)J=Ms^MEvkF90$8DtnlsM~`QjGI|o)#gyf#LRah@ z+FAMYyf!F&|8{DC`Y<9jt0g<0YvmtL8b!&|XBuIEQAZ8u_r%>ZX@1QxdfE0pvu!i; z;zI0=k652ckB_+hKBbR+8l2ueO(DwEHa7;VmNGRAf%MmH;c`Ypv9#;G*ECD8qt)9- z-7Yte$PE3r9%M-D9IpIyT-Ga9WxL_()Yl7i?`~7w4QGDM``^xpo(_ZKsmKa>$C@?NaOMLRFPyaP-&TJ4qtV{gWZ!jl6J4nJ*srCg6nY&s+k07xIq1F3 z_SW4dcNV0}Nv~O12^^KE3zZKM$)prEGYapF8 z?qo1is}BSeqwMe8fhvASAKlJYU-Z5wS(g8TFr%s>ZFL8m49pDhM~b>^y1Sg~t6H8h z)}45YYZ>hJ5wf4n@6{K{Z@!b!Fg1tXLDJ|D8%hz^L+&XW7PpJ`(z95Qz`e27fIp0GI4vIxiTh=9cFItY-~TOq0^V?1 zt-B?&Zn%N&4y}*A&`vllR1+&>0dk3MGK;AiF4L1<@sh&qY>EXk2fy9M9SW&tN+l-s zj%IQndM6Sg9u@~5o}c_W%!z^3xg+|@(e^JvAU}Ht_K~prbTtg^LDgj3zry>l>clu6Z9W}0zIC5)ohLD5Yo(K!6;=uLqITQ-IPPI&* z)pvzN>ei}WR2Z|ObVrj~5SW`I9zKndMR9ii07s-h=`du6Ajbvd8D@$s5khCU0A174^1xNcGgVRQ+t$_%IuXrSMqsKy>fqq z@?v{yZD`~SXE}7yOl!7sEOlIY*Ug%cj%9`7=Ld3+2j^#1waRGcs^y&ARgk1q-*fXy z`gc+dN98WOqfyVK*Vn@eKOCi}NGCu9v&?c<9~E3&=>J;evVtV!k3qp*3`$OTSEGXk zH|k3{?I~x%8;K)Zo+ZAwEAi57#h?qNE85ys1ZKd8v zV6Iyqthw+vD{Q`px2ID<9R+e2vNd5iJ%cmPKML2VRMz* z!1)MCkKv{Ol?VF@8cuy`&Q6aW)w6S^1swb|&m(uq5WP-Nf1za{Lkk3%_xKcVqT6ZJ zH2y}(XgGGxQV}jsWHeFBf1PO5iPqVO&cX8d&NU20JwsD^9u)q}^GMk5l6SZH;HpR9 z+j$+);OYWt4}JMdE}0h{R`-Jb^noPzl;Fkk3#`WE77$92_EYgTQsjq+F}pyG_d^!3 zTKLT+SoQCLu>ZsP^~)xwN7J)64S$O&T#RB>DX{JHWebDNzom@CsP|#x?UD 
za{036s03NW#kR{=m0&c}?!8VO>B~nQ<#0N3)n^yDQ=l4!m0#UxzKtx+CydAdzAjrb zsDvSaVlu;Ehj>h5{a&q(67JWwN59a0=k-2R_$!SsV+{}X=hsiqH7L}eH*hfo3eydz zJHj;9z$N*(U>AELz7oDr+wX>E#qE@ju$MF zTOm6eE5>zI>@$=3HkDd9X@LUAT<(&EJ1ibapDf4G!}lU=jmt}=9iVNzMdE?5?)%nxNyIG>7W||BJ5N8$6DGQdM3QknmVhv3%K?LQVV_W*Xh! z4Ay&W*lWfxoEB82gfT%)KxB}YQ*T)!io3tGrYJk77>mH;ceP~m-2zPP)tchoXngXl zU_SB~(O*B>;t|lhv&OR6WDH;}gO!UbB0F4ac*IqyUrCJ>pv+@MdjXI_4ukmU zO7E4>#O;>ux%ZO#X<` z7>v%a%)NZ$Jp+i-jXpe#Jnxuv1tR>p98Kmu1>nGYRG-Lkx45b0*d^7 zODvrS@|${OE4se4Kkento&OdUi)kTO7EK3}@tx5cUBwcL6!sb$k;b_azGKE-mFS7I z-e6l;Y)#%$QTdUf+4bW`c%FI;1=DGEL@Zqr^YUX|aA9<``p5JAM3;jGJn^5v57(xY z@lPB}MPq(1G}Bo<@c%M>p*M=~X%eSvX_Wio08qMOfM$Bw{N2PG59tYarE8so4(SRP z@%N@x*MAN`B&L4yYXC{G9L%o;@q9e1XmN+m{~*r^{vv=+O?WBFc9?)?q7wuA*Voq% zEB-2iIpf)tUh!kJ|6%^WApwDK2AHU(Ip)oE#NewD)gPI_uasExg0H^R$f zpegaol>66m>Z?C)oPXu+|KG3hLF(UlKA4Oq_+XIge{wjSqt6cleWTw;U%IJ*U+old zyoxUe1$2#Kbg9k(Vtg?E8MGl$y^saq%Q^`Z5c!OZjB@Yrj&eb4c%u3s=f6RIPauU) zK)GM~PI&&(uj1kJckiAn|9kHMJ_zP4Kexkk0r2slJ$Q7FvEwVX|6nM!Kpp(}GM`^b z0B|h>j@N@C|3(RbR1ZPdFt+MFI{(=8Cb)6NVRglSm)e0pgl9e(FO@Q*-&Z{sAr5*Kg9~zf14n8!{lY{PFyEpMe|S(9ER#uRrMVjeaFj zOag))g(vvlG7!jj{WlGf^lE0HxBePuXP#gC3pDYX6Pt(Df4A1c&-r_vlY00+y!QjQ z9v#sW{O{rpFYt#0=es-R_>aL33EpC2d>iqIs4mB203FT;-NC^5i zwE*;1E`Qvxfr1;qIe6#mJ;FDN7C(1Bh(}5D{da487QpRVjq^?XA3Ey6tydj# z82`KYcMbRhq9XNB82_>T!K~_kT!4LbJQDf}-`?{)F4|JKRbQK4VS_vS_ly6$~ z>mP{iQIag{Yuw;}^7MSBAnD0K8wXzgcO`)C3HI@%P+!27TOGAcDF>zyPEX50dCp(d zGOHrsz#p>)6!P)<|5}Ca`g#E&Uy#Gc^508#-H_N=%$RAkSoueDg+(Jl4e&NvTd&zb z0WgJ!KL1ILC;nJ64%n8CYvUE#hUN_CLlTmwFc`SeK9mt6(9p*FuR}ca0v}V2Bp`*! 
z=LmfPoEtgm-YcsYlj#wv#VN`FhpPO_N{n;e&f}wov!jMeK@d__?E=0bud6ElUJ4ZSc&xvvG+PBE zZtzpJAHNo%Rj$^FSfG)8j)~f^&ETzLsBi2&wCZk;aKrJh>X%E=+g*m>NFRAww9Rwc zZ!gCI_y##DD=VYA#mBiPwVd>StGxwqvC+mFXcgs+~n?ryEd@WFc@=k<#dm(8U%<~#8~HVt2@a+ z0q!f_^kbsRJ+kU29gpQRz;Yi3K852xEBT^JDiaQ*Ap9e}`a+=z1w0|<=3Q$2c^x^l zLO_A-K41?2^l6grXe!7bmfro}OO4LNKd!Z>{>jNL0H_6-eF0^1FQ|k*1}18>t3mzAt_l7yyasX&S$IUt zvx%Hf^2wtccoV&?Hn*!JB8P1(E*S7T&jn3T_cK1{RLdXZf@Hx6ou?-9R~pVveeo<= z*58DYpnB6w$k`A6!b*3R<{vKpTP!kLU84EGogyt|PG1g~YMcXNx!R+Zi(9?&UNb)< zxbk4&RoMm89P`9$lHpKFJp%H$cA@+soKyGV!w*GE!18bm0Ooks`-S|Unn}P0f@i4L z;)$7f1b`pEfKp;bG#=TnkcC1l_cz?TXD9=c=qG)5hqHyc>>9RqC+mtBGb|Riw{(T7F1+*0c zvJ4pRrj`6t9Y8fddHOfLp%TfSVkP4dacyXNFTFB6WF3D zRMm2F2&xs7fC|*oaE)|Ii|On~d|t6_APb%-u@0!SB_M<;Tt8Ll0j0xjK~f6^d>rFF z5B|irvu15vI+6i3%k>2<@dh5wS4WFMMuPbTzWTHj$I}9!X!i2_aU)BzcckM3*Q6~z z(>1U@0-y~-OBw|5sVJl5H&j!M^OEJl9`zw z$5cZqe&2n5nV8Ac0=R0Frkt$WYO!Of1($jOk^kA*>0ZO+#%#BeFp0EIXguX&A21U- z0x$i9XwVwqpVLUt6gLC@a(%zwiF*r-X82jprN7-Ci=D@oY_ILQy=7<;qf^FRrwL@m`KxGI0NDq6A6BI{FTQt47Y^myF@&WJ;gfKuvfuooM^43nYn zz;y!TB-cC0V5|y^ek^i`N|T9;uik_bbXfOOTzx?US@dc+N?HT86ZEHO826A6N>7-F zK&nyf#mDWl$4m|TA(RQ>Yr>T7=48!EDPGflBT$xPC5z}=bf@jiO>N+eR*@rK$+*E3 zZUb`=s3uQ)n)X?ZDV0GMdGC;bn-GCyu)cPW#t`od<&@&L>nh#fi*Zf%7&g*BxeSb* zZiQ0IZudoIQd4@jZG(awQg7O9@V+xE5?=jb@eKncobT)uDn&*h9+(T2hjZ*_ zix-)okI_O9LW_`Y2AE?P`546jYf%m;jNtmT6Zqb&l(kxiktn1JTYj;Agp6RKjR-2e zB_&Smd$J&Z=4w4#4>8BUE{u^1BKcP_b-rG_5 z*S1=^b3Q*C2l%fI-Hz(NwRHIv*hq*T_n<Xw+URCaJwkr)o1ECzJE9Pfzd(95TV6UoH!Ulze+)9=@O)u&@1D(gp-Acf2`62k*tZYj- z!9HA=o$?G+cEm86IL_4iZPU1LhWl_3UAOpfW!!v&b7o3`bS7ue_CEX%gdP1RM52g6 zTpVWA_^N^!>4Vq#fa&6%EXDXG>}A??;z!AVQ^bftZN|}BMalr9@bxdWU)3Ag5}Z(< z%bmtOzqyk1J|`OMV^<3k+$Ogp>KNTpPB>-}fzTq!+Z5Gqrbb3Wgtkfd`*KyI*m{ms zI3s99CV_1n>oJrGBFffki1h{jgNb`#?5T0mi~c}gB1J}6TN(GtIT%JD47pK_ladDz zD-*TY=qhlq^-K+upz6!$trr z+Wo#kd0Sw(C6>%$me?o1o9%OSbTr$)OT6~|XKAS5rwW)mk{Qy0(7F=_yBrW(7dl)7s$giuo-%OfMu*F1@}zG92I^LWXGOD=eLf88L{-^9#3-b+1^mdskIbTSrgxc1*U7lY@Dy;5&v31G|@`v?S=0-u^0&o;-*D_^8h 
z&RN}Exuj5J_w?4A_@GuHzo)KR`)u!#p?qYX5Hv_GuvTwSzA>yvc- zsc`AoXPm1alqFNu8$}h}=(aUfn94Fz;6+G*i%}2+n(Cx6Tpg-HX41mKAg(3Pi)PR^ ze31xY2#2Q<8P1tOZutO|5N+|0<`^h7wWzCKp+hJ8iL@_6iewK zKCu>SALx*Um{73Ep7wYk<#+nOgY&3ZkCICY2kw@NqUMdR0LS^jC@J)w!m1xR-wqGs z{xO?e(a~2T2h&7iGh*G!`MsGDh6$yaakh zI<1%5{pDd7f~_uEwVClXQ9ub`p$dd)$;D{Z>#Uac0+~yc;S<0CO9mab=F;b`K26Tl zlE8R>Y&I~e?zyEkrOjKSO!6JB;MlqVxB(7Bhx!sBgqDG*z^4I0WexIHSps+0j@!@r zDm(_42N^A%Yhn3KnU)5*G^g@tmFPZ1D42ub83a0|>4*)aVMW>_2{zc>Bm8j$5J*cL z`uducN$sa#p4;aALAM|(5S}$WkgZ1W0Ul01*E-&6rZL7H`;k_An`e~Vom3<&iJxts zjIN;524+&5XWj{`PJaKg3%>vapB&}8tdOF+Poaz@1 zl{-@;A`7o`5y_@St3GMFdj5d2U&H2IX!miyZ;$j{IU;OB%Zw2$z_;x$^>Xe}CQv4< z21Ika=-Hw74vWOZZ7r*)79T%fGmgbfggomRXQD*FcdAi>t~Y0PV&a86$-e=#4Zp0O zBTecZZ${cfD|jBtIx}#*mX*wARDg6ejR#dwAg}#?-zteC#T322_WPae10)fpxuO8{ zVJ##$pkzUKxP}Y|Wq{K_)C#QoyA_#>IPS&ll5285jga+*58}S2l0z<$Ac7we2+|jj zzQa|A?2N2H^`n;~tK%7R!cKh&3!iLbnAlezD4RG^ z9M3g2Q+6hTB{JK8WYF_Okw8v?2D|k_)ox)The?)BXFit{=Kt|7C6g@ zvR*4D7sBM2FBIs7z3X(>7-2V&%6FVd_C_~lqK&XaQhY4wgunY86jP1a}XVpQkx&fy7$66D_Lwp zF;(w-ydF52J+ecGK1L^n$`?MVeXq4KYHglkQ7CTvWi0MrWRZ%GSr-V+#9-1(j7`<> z8us$pK$^lY8kW~_z^l4O=dnU$JU#aev|67+35uk7L7=dePk#+(8C1wu=-#&gEqW4e z#nP9byfyzI$-wMmWvlPuE6<+{`+y9$Tk-w7ZFq&khTgvwjrY7z=XKeW3a&ek}HtG?}&Ze4kEO?oz<>i6^~V^{XW@h@W<*^gXr z7Mhg)s86}w1R5n_QtI|O@&Iek@}ec=ve-#?0*$e^_f8(_&Zp-!qs!VVLD@62sh*-L z0bLGEE0~JEZ`c`b=$HV|NLMFLAeQ@#Ly9uZBa9;awnfx8;;&@BVTXmi=Qz!%UMek8 z7=A&CcKvutcKhuPZnFj>q7`7Ober51hMS383BY*kTXtE?A%=(Z45Rh2CKWarCTN%8 z+6O9`(}QSIS_Ynu#1hnmM6OZ|9o-6?a2^o4_ik2R9s zKJxw(&F(b8SBOD23m3ip>t-Sy5GUp^dFxvywBA#e)NsplT+bnt=~8yA;|||P-%MSJ z?~{k+xlV(=+L9*2E@hV5@SAJd1@vkv;6QZyX6vA(HhbqyH8Q%mx)?uYZV66K)oS0} zUyE@<=MmC9<3I#yJ=Uzw(8WxEtSz}&b&)sB7(woHa?$$$QlD+!;G+EaY0}Y#ZTGmx zty-dQQ@#_+Vs?&#sO5Q>4txQ3_%Ywafl9)`M%GJyOk;Vdf;}@5H9GgYs;m4#g`@4H zm+XXh_;7)-^U6qm@!y>l$8gW2{FQg4n5NMf$I}S%;f%9-@!fAs`RZH4sO?ar*IBto zLZQYng2pl(iN}9rP&Tc){7!ux-lgIW{iv~ta8ksdXDdk+7dOmZU9-zKq|nA*OAZ1E zKEKV(eBrwPT6?Mxgj< zj6r_W%{=8JXRQEpvLhDReRa|!1)wrVo~k@B!GL*E?b}D~M}``*Kay1v%Syt+uZTNY 
zPDdW6Brxr;h!^*q>=tJVj>|-4u`E=J4Hv14E<9t~+thrFa*hDwyL`De|3t(i z;$wVag*Y%Vxz(O9FdeyXzG%w2+4D_*YzCTwTgn|SvUgu8J2Crkdir?buL+ojB)eSY za<(=g*G#Uf_T}#FOYfej?JR~|s7}*7e=WV!6zYLj(Q9aN(3I>(|5jSErMd}hm5Qyn z;K$=mp92`qsUKDPG5^UN(rLY=(5Z1XjM~g_AGJ|lT5Pdi+bp*}Jtyzo8iI5#Un@{~ zW4W-lhiWq^D<|)tc<@K7Y^Nio=$3BbB@Gnd*1uT;Ea;3Bq)>p;UyB3vp_mIk{SzfK% zgZ13)|EoN05;gg*=nv)mSQ#C=@tIO@Ik6}&CgN6#;nwbxe7~q9$F@an$Ahol1?)(^ zz0p(TfYX=+d-eMsPgzfjwLNhe)~D08Cq*91HCaOr7)&NL`zqt#@izDXM6Nb5zh=Am zZHb$b?_qf}r|o!I;dBL$d{>#LEQ#St&-m!Rwz;dBeA5-%$dj(e!se@rmi;w9-yPNT z;<}(Mkhbj)N_h_|{D)B{XOWJ8UsN-m!?Weo86}#;rjRA9uo3Z)y+%&&c}-(;dmYkZ zz9n(fT>8Ng>#=CU&YG>h(}~F1xK8)N{r!?LIFiSL&M@iL{3BA7->&Rv8)83V3k-l* zU#9;~@<7$Ib~AB%%=qvIS~TDBL^?}qi`0d|y*+T_EZIGPK+96vqSp}1?}WiLW(tF4 z!DY>N{rC1;)&=U&L~Hb;pVYFfj=jg8-vv+ot9LLvZ*ph{z)0j=vCe&0=QwzzZFICw z=(sg(QX>WFBS3AAE$r3aX`GXBY9Xb*`B{@czNf-FMg{XOG{t3eemk!?CA3Gsw%<&q z?T&j_7Rqkw&iE-(^zW>%X;POqJ{@8&v15FGjP;`UErTPjgWuhh#q!)Hv&DILP!31# zoVKFo0t-X8y6P-XXkB}&>09|&=VxRNu6{)udq&%rel*oKV5a83|m#%1f* z(k`ULYF53peMsA{BdOLC8=(!G$e{1~noN3x^{1hUb1>&%vR}I>-xMrkd-u4fC#N0q zf?48}FRM$W#Pkd92QVxl8&210>bZlLc>ytU%wxrca^XITA>6-+A-ulgqb8H&3!^)} zZ@aJTRRzgYB(pGfJ|WffnXmVslOJ}9S8|$4cgca8J2u#}Z?L^kgVGuZCb3f>_?gb^ zb`tbnyWHljjfD3jLrZp6@)y=sr3+AF`vJ7}B2c4G~(?qZ7ujA6vO&%qW2vSd8K;y-^lqa+>VJKsj_J$1#YJ|oBi)@RyF87#tj zJh8Qv#Kf53G}?1T!a;wB?9Nneh?q-5+7J#cBOZfTSzj#Lt9V!*FsFP@u+-p4xI@I@pPXa$zIn z<2~EIfFgGHO*{n+F)fkf{r%Mj>V3?(*mGPAYLqdb^*$7w+%0_sUdk?F>)g;=JETL= zD9IOB;BRL9$b9w@O&4~H1rudky|HA%j>Y<3{Q1+V&n1J+hbiGUv(V``73`iztFJgy z0S)HNdhaIFK!M>n`~+mvd$*fqj;$0dt$x8qpnIWA&((~BKX zj7=JBJ|2l)*q`rZwiRvncXq0AcA>O+;0A#$g;ywgpf#BV#6n*B6xy#!j^vxo97c{f z{lM+zoi*BySC{AJLQ^U(vvm`f@)>kPIbI$vM&%-f@3&+9p!GvP8I5{n-xZJ`xTd}D zywsYjqvlol>;7?}&7pn&iM)!R-i|KU#|^!qj|80{F^pGI#{4_1N6=pyz=?o&Ub*x9`8Rp^*xG?6 z(>`-dsEb8{>~r{YT(GyAg*Iziwt~$=*RTi7hdE+S{ZqzXYnPs1g4qVUwAqLgU+v>I zQ?q6M4w0xAZ1QNnbGBJ?3lJmyJLooGvlg#$571YH`I9Z%nv+|_$G1zvQX1Bq9&GwR zU%@NE+xC#y5Kf%kE2q4B>0(dk&rlIo>FrTJF6?;Dp3o5FGX=bJ7RsJdYU{T4He;QK 
zAT9rHlr`TtNrb+Q{)fL%A!-`(#P8!h({|1$WiWju= zuxAg%3pMCxl(@*3p2|M~yKP?F^MIa~8S0Brxx=l=H=IHdAJTazYM$(|=28)2o5wE+ zIMZO!N>0CuDje5-{8=~N)hS+}`2JbczA{LqiEVSD2s!=rf$exlGt=r`0-V=RZNbJl z;0f*x(xQQU+WNv{%XK+{wz`q7JyCIc&7!lraO|GIqB`VNK}1M83p^jj4VjIR3uq33 zTE+bGp}Lw*D6$$BQQrCMp!CV?m8NGk)RINZ!@^@kx1Kkt2>sq%YT zz&&yC{VtmpIz;yM=~cP1cE(28ciWs@yb&oEx8wf0z@nY1w>>(aMhg}@h1j!=4}bZn znKTdI#2wCj%n-)BDkh1dhWy6y@;RKrrMYF29ft_}u3yXpPTm|4^?P^qm??d!ym?FX zqx(mue>xcG>h^j+_9hRCH&dnByfau89j2(%}0dk8tT{kR?Q%4OQ8urRe4jCouSC9Qe$<_$j_r}I}6)NymzUINF z(%*as5C7(h3ZM+()9haeLP!u8zSU2!?{OwoB-YJXEGd>2w{iV1>;4EpMu897B3W#? zR3eH%&AQPaANliv@zXF!TCR+iX6>wu2nWS^obzc?aX>DLr5yhmzi9aS>1}t$)?6Uv_4#`-c0KO6tu zjUUEufA1gYsAGqZ@QW5-5mTNEz(9efaYjMoSlka#nP(KQ5Mwe&6qmb9%USyApA$Lg z@vIa@RKb5liii5(anhe5>l_NI+SaQUlDP{pI&1%55=DP~WNhm>hlM@@T>0u&2FbO5 z9pQ}^$*WtRENKUn0DT>2=NO9tAtNxfIV>w23Bk875P?tGZ!qee{bPi1umd^-kEPh| zYLd@#HcN;rBNNj&NN(B(kx;q$0;kd$VxafQRce+6$>&A6k>q-AA_Khel0p|mOn1NN z2b~X)THqU?n$&Dkz&lgPf;>FS5Z+%LpByOt3($cY&wTIiGMg=zluTqn0lPI|(sY0J zUCw#4ufxD$H^lZ)B#^b@IZ9vgtee0{Ngqoz5}GzD3AhN>+%`+UGQ#m1r#L~Ac{E=1 zl$IY|w*{=_U+%;RDuq&)1ImMoUL7cofdX9572wxa^59P%n{*mgyT0QxNa9=KOHf6E zWc@io0+1k7p6vgQ(Z=VDihXvLGbVe$xL5*qs3&S1lsw8$f7)!>oh|-j0&7P*)|q}k z88#|+cI@r|IMb|fY~cDEI*Kh9V3t;LxU-Tq1nZ2<)Jj@T<}^3l4%qHj)jyl}R@=T0 zpIqYWeF$o2=kC~o+B=?TC@J^qT*b7%1DI7~i-%u?5@oy!1fNt7O21sR_@)y1!*C@y zNdQYhP){IU@cMtunh*F1fxe?He^RzJ8^YkPeJzfZ4UXOg;*Imfv^PSG$9Cp=XpENh zjV1EUsDR^UL2)BowB$U{(0Psk0?RP9m2=gfxb*RywYPXq3P|Pl;NFl>_KEoJI)*PC zac(Tno;~vf7Em?rEHrZa7JCG`hW&B^X~5+6|%na7e$xCdjGz&0o=woq-CFgkYjAEmSk;qDS)_guI{DpY zKSU=2a@Fahj3qFb4iyr2zTdE}1Y(H9Q?%$6(m>%ZIR)CSNbgRuT5doT5jKqyv>WJ+ z!DpS7Xk?#mHxjZdcmST7RPWW3S@-!VP>9eMP^lJ7-6H}wiz^s>t=Q)w$%`V`E#&T8 zl)UuO4Ea1J23$G_G_x+v4tq;kDPw>fWGf-ucY-G|5Xxa=5AM|*CoNnfLjYE{hdW^c z#9{D{vA^ngEs`u@u4@3}-KcqGY#$J}$;*H&p{CeUv^E~&2~z7cNu@XBSBVjU z$Uk!Fjm184T)yU-x%116FcVcT-b+DGkz7w3%uSm zgd4;@oRh;BLx~KLX5%(DyhG@LrV|+){d+R{Ox|M#;78oHAKd$%PTkpvg!#f+k0u=yVNipuMQ^M%C 
zB1VT*Kz8mlD8a!12*ec31dM3$9rBoMh1GV=vq}jweHx13iR2eeuD@dR%W>1p`<-y!UL8g+VJ>1;i zR1w|7?huI7Wun>9cT<-5aT8$L-lQ@AJWvbS56TZfMnaS2~R2FNv2QXGS{GAk+d)kQOeV`;azb2337dIRt)=9=y|&%-gbkW=p- z_+_+cv%5;bB0sWM%nlC9n!WBZnvc)`l;wYt1PbNAZhl6i&Mz&tx0!yx#oj&mhOR#B zcJg-J%A39q2y4H-zz}i~F}yg(qJ2$02qTO*J5B^^P1jAsc3E-L5H}lUy;(cyIWE=9 zZV|d5K!$*@z7N^^ombkEL-CN1afv0FvEUiq1ItFBcRn2n$@d3FQ0rcBxS;|ecl`w5 zQBhwtiY25FKy6k+FDr}yKFk6mj_t?yt=(~#n)gQjG}xJP#L3RcNct05?>!>aO+rmGnWX>@71Da>LDb}zRgq{CHW3o)ze7cY?+|1_Bt7%9C8)!wCVSa zTHX?^$|~<%M!M(>mb`SNOUHXo)Ea8M+8gr5a~0#tg|#+=*v8_SW8LPpZNRsEVc&P7 zZeiYMwHCtIjHFSUSBB6|)cg4|@m>+P?N?JqXG!6YgO)e?5IcH%ZYIhUPai>tpNC(u z2{0qyzi1ftLJUx$h_RL*9(-kNBJrE?+N?zgxH(W9M2sYfVCPN+j>$iBCC0XjQi4K( zA=1P}jSh@Na*Ty=OQc*6)6@P{%CzUfzh*{fMJb%1Hz48W3Ufv?8{7PBxPv-#V7VIG z`{~v<3hqrSS-|U}zCgC(Fj|sf@_tL^4#f(K1?xm}2nmKvhkJDczx3z_Ak`g}I@m*A z7@(=YH<`=_#`R*9NEL|onxW+pP`^>p*98!E1roc9Y@Lei+UypcuNrvQ`k_&_v;{@b zWC4qWr`Lj6qk%{3PY#e*K2AQm_*ZTE`$}MxcPlKioTey>G}@moIh^Vhn;gfb!dz{I zU`8Y@YDW81luM7kC`0hDqq3injGBObELwr&J1D#Pb$l;W7P|IDQM!t6ssva@Ow}>t z*yJpj-cxPipfwXL8&{;u?fR{_u+0ezG0llUOuMcOwcbQC$+T|<#}x5;ngP+42>pIv zVBH%y>~l?U#_MjQwhY~DBn%b2QZ2+46V zvxN~RZ#hbNz{-|_c_jgf(vbcv7DFtjcxpI!>FXQ5C}k+Vd~uF<`n{jPN)vxN4jEXEDJBCd4Z6duA4ye(`qvzyYw%4#z{2Md*vgzGT8~9 zzR8-{M`DI9s;4tv45ay#Di05V5G!5?n!4|(Iu6go!3o6Rbl#r~w+DpR8?UjxAY=|g zlLjBk2Kuw70*XmN2OM_|$`858@g!y!K8@tJAdb46s;GNe5%yUa!%V~GN!SOWLQ~5k z-5?Z|zxOZ@?eC;=*q~nPHKj`K9Zgl;L?yC?X-jwNBng$qlI`LkB|3< zMt|jN$l7Q)94eeJmNlJ9&UTYZvc#gzq3vrTZKVZD z0~Um&Ay;JPr}TyZuEI=laKtZCMICzV_e2d0N zjexxPvZ6ZdRTkL`&O6b4axi(kb;`K4%8ZW%r08uooScZD`tY{L8=1GqGd|y zB}D5$_Hd?kyET;KD1gud6M4|crkl!c;>s#aY!ygzbae7oL3U5!2wVBd0K$%a`y|lq zrHz>nmNH?^a*KfM&*#R+!1R@EACvGllqRWpUf{@1|0WmVQPqwjxhQv9rZ%S?!Z(wcDs)NSmY z{QRm>??7O8Q?Iy#+N6Wv0z?xt>IQ*+7BzY?mj-1f+k6_pp+j+hX3c8)v$h}mcm<{y z`@yQXSIT*_=WVkuN$NvBj4flzM9MwE3DKk`N#uxyx{7aYq$$&2Trs#=V7(W4kChqSETNVZvivG|8TZ6<4iAqv9FVSUyX) zKklATL|x|G1B5S}2HI?R1VyY;6199DF9rzEJ6mbti`}DoW!KB9!J<+69Yp8L1z`QC 
zb~qZlT7?uYt}eg$Ysn>?G`Q8RIz&Wb>U2SAGu(1F@>yvSrrA0~HI!H?S-eSJ{cF=~ zqOz327I59LMJV`)A0_oV(ambWdCb&cCESILN{<&?3C&@Z`T0_2i`6#Lz!5EzV%Y~D zI{w2nuIKp!dz|so$AL6a7kpdQ3O+pN91Jfi`~u_4Mj0jSY+ipTxtA|a!6x!fkIW~F z>hyx_2}p4ad$crKm%D-_$@gN+?k(VQ^nybL4C_E zI1>-6iJiklQh20AbjiaSbaqJ|h|_<*%8|+f|J}Uz|MV@WxxKw^DY!iLe$KgCq9YPK z$HZbm4IoGUA3@NM=J>9w;8x$Fm(LsJhDS71n^sU@H+uRc(2-#Jr+NNQpEElgCTV#{ zqvmp5Xr7;1+ow{&m*>tio2>@a-}Rtfofr8x)iQ zcbH~?_2^{?puNCZ%LEkRTm@3t7aVXPXn3l8fR8zW=eQ+b3c~I2Som`;(7kbV|99Q1 z@PxD;7?)3YPvEsY__N6g(DLkpL!`{dUg;G9sQH(z2k3ai$_LblxwgnsIxZDVo>6Ip z1S_c9h|)@u#@A5H2gQiPeDkh=cB%zRgWfEpzHFlOkpKXGPe|%JWB6oJW#&Iw=K(>3 zeq1sDBdFNrCa9$sIgS80k^@5kIG_|1JRZ-{&&>nMJ=8+7uQ6cKJAq2U=W>Y~S&6e_ z5YPIp`H^lRE-*CHM#w01aAf^>h*3R{hBMFHteqC@144*9s zNiMuFtAV5UfW3ZouWPE#ixM3o#FDHlAaQvR?iN(dU#S6CN7q^@m1_#u4La8!*f{)3 zSN}1xOaR`q|MNONzlxu1m-crXT$R74ZkRu*a=!n2M)Rc~DnDdz4Imk1K;lt1Hs-mr zJS23_9dG0x5MA~xHy>59IoCcewUGE|D6*Z+w+m>HW_M4C16_bi%(^rTp)i9EB+Nyr zQC+^>LPo`a%dmX_=v~$SZ(&3n#o#k6FkeIc?5hHyrxB>9k;_ZW`3ck_ewi#^i-ee; zot{kk%X9RAYH(ewFVIjD4wAqqzW{&|z3`BwFPV-4=uaUANK1$i_HwU4w**`FPGx5$ zS3hLt+w5>uoCJW3)4|_AyFhm_26P)(-lvX@kte_)*iPI0qE9ZKJQxDRsD&bHISIXAGS(2;0dWOB2_1&|^~L4AAh%F*BzZZj{GN@wn%YQa?T z#0HX%feOS=hB;H$GZUOkato?&mg!HyVL)i4KJifheBlfMi)10rF^5h)P*Klf-}bC@ zCWWe)GGJ?=vN?AJO}YxT%nS5`9#aEkjcd*dceNfUj;_pn0kSZmqj~WDW1GYRYw$v= z$9aY`a)gWfGHBJB)j2LvzK~uG4GmM0UPU44IL=}Zhd+iS@60T$?G44Xc6XeLTN#D6 zXpP=4=Z#q6Q z*?TZ3#7Tn(kImne>jP9^ZmQ|S((}$}p7m~Hr>9YW?4s|_>tXi8{;&fn%g*}bc(=Zz z8W~x8TDpu^6%A?Y-;5untOCcX_tYqksNhPMLk^lmn-d;U2*>SfN6SCD5j;)nt#D=4 zlo4t!G&E8N_Cq!0z;B*wR_-?bwoIqHV>&p{p9|$geTD`lsNAJ9-r@c{*5+kbS%)P| z*)e41_+ZNk06>Kr`_XTywu%eD!DQ$;FzMcUF=n4N1owh@fq*+d==#KP?dLZJ9k*kD zjFU6=K^iyw_s%dim*7X4hi7v?V>yGnL@(dwdt^g*?RNNWzTn%M+_7m9oYt=+KK?Mg zeoV{rEKluq*Q1+X_h--koWX_iU5h>l!OWC51D)*90_r@IRM#g*t%vQFH z9Q=5q-tMy#xdn#1pr%vy7d;1f;!UIZTKv}>%zs|8qLKRj-cy@(LlWzQu?+nqGVJ*a zpy-Lmt02MPy=@E75|)j!+sgx5W_42Y08#i!6E5txq$!*14{Fvg^rtWXo}Dr+%yQ2e zp>2W)Wv2Kap4Q)Am*ACha3MJZ+K>-&+i5fz0NfG7!`n{gg-;$hR*%KY=?nG&4aESM 
zg74!Uhs|rR!v{fOfaBWd@v{EGA=~13%(Qz7?oOaVkmrzBtilHGW7|E0`mKNP6kBxi z_L1I54Wj;mJOZ#Zta*LNz&wds5{q;_Vk~!mux3@%Zu|5A3fD-DG^W_ zkuH^*AqOdyZUHH2i9u4y0TiSg1f;uUfT4yud;Goc|EzOyF3wu##tWA4#NPYa`}?V{ z?{2txPsd*H&{s0w+w{XNuG==`k%@+8l5IZ$a+aZ*(9>CkL4$|OL^{e9WCGlj`?}B8 zSIJqGNcb@5V1)GjMB3XL2!G4vtlJpsIRSjH`v)Ly>iZuy;34#gp6u?pWt-jBrIJIV zEh7|WkWjHp(CwHLDc*&D1&g^+fj~*zx z5mNN6A+hzkop;fJ3f{7S`C+=-l33#wfLPVShBO8;{^UFEzgZBY>54uG$9Q{g`I?gAu(-;`9~z=qE%c-(8t^ zUR!^ai1V58({+xoVKrErJ=C1SUM27RD3uGo8&M5PyXf7%YQHv0iI;X(4MJuiaT}~P zevtc0I?K4m5e2}QCBrf-Lm#mKxGa&dAJ8TNkaaWhjgG#nn?ME?mbE4XdS|Z;;y2YE85|fKk2;X6bJ3KBTxq3w-;0=1bAi8Crl}bXhjVXNQV#JGuSg$ z?%@9KDCLt!*Fq)wW%ifKpDkM)H)Do0CaeevKnCd0C+=s~P69Q*UoagfD4LEIlljV& z-lh>*fwMyIq=z)wbF(Gl8mE+24L$nKN8!YL93iyL%JI8UC`KnNCraur{ym-oB*9}p zv$B@*vCs&Z&&YkV-=^A=-C>6ig{iJg;;_GYPLn}Ntx@n`3}gp*mGC(UBKrD25i75o|4D_AWVn$!

&>Agf4nHuhrZYvSFI!Dv1iLn-68N zp|+AnagV$je8|w7HiWNq0Ih0xT-k>=zGwHpS^z0Wsyq1n5qoiV$O4^5eyLnRR;~PP8Vr>F}N4p1$pN#2MG9X_1lW=MciW>c)@z zDv?Y*2x!c%*}d;eaf@PJ--(V1tpSl_dYB=+RjtUoZQYKRxQxEQ((34sKN0T`9D~1w zRQo>=@KZ?dVH#PHN!){OUJ%ony{Mx3yzVQgM&o@jA{U7o|6M?FKjD`AIH9e=)Pk!1 zx5f8(${h7VbaVBeqzSKKUqhVoxBa@TQx;c1av+BlA+>TfF~ZNhTXO z96vYrA%SP5Rh98?V0Ap2wa;5+Um&36>$88A>qeoOfr%mXMm*Fck{M<)d2>c-*xB>; zdPA@1&C|IizL4p^m0!{q@s1~T??jOjs$}9JcFJsLufL%~;tvL5(AC#+w}u%|8C`=v{>9)Y9NP$@`X7dkDd~V8xSZKB4>M530+abcK40}aIgb!=kZ-kDE{uw3SWP`(M)5mte32mhp-i;uqYAlp$13dj1IBc48o zn$jItF$s{U4fS=cPg)RE*&i)fF1^QiO*~ErQ{mxPa2$aF(zd$}L7JgZUg*muhG#y= zw)CtgKk?B0303&fx1=+ByyL3UifTN$W&B+WCedGQFSwNO6NxcFghW#4Y*K~oB0uIp zYE(~j-tNdVR0oL=soI8pQ5rzgr5~Vfn=)#y!=n4f_dW? zlx_JOh|BGYxd(NfEKFXU+-B&-7Aam$-87ygbGP5L@hik6x5sFCGerypOc(!bvyBNh zd=fBrbZ#d)3`)TjmSC$qF9bB?9vh6rjDlqvglK3fp zzxXxA6`KBCFQVF0uO8LGv791UXTCxV)efzkwJ3+(oPx^j*YT0z_aG?C1(C%iD1zg) zR*_pvln69E`p>S6pdizT((esK^wqA-ImS@wV;ZXN-=fI(USuZp$5{-@28Fl%&U;@S z#t0=NuO1@;(GQoe5;^oGNqXzA>0AK&G0*0LFB%p8d@jYZL~Am#gKA`N!65s?QMIns z!CWVlN+wb!$loxqUkv5pF_?tsTUtfj+C}th#QX%74KB+N$6Y+wMtl>clro&RgZ?#` zd2g<%M?kbd6AriXamds{P{Ytqckh#HiwSb@eZS>F8(MhDgoYf_KbkDb;bp zy!7me7TXnFGc(_1F|X)+4ki+^oqU0qRm!7A=-nfPl%%0Kvc7{-jnqeMys}vjc8oP1 z{3Srk$V5i*aE^DTvBe~>Qh$@`Hu|d~MTWP0GQn{}B_*5cMmioinR#rIp}+HSUSSl~ zpfGS-y)D=Rro?**x>hvOC-*X9szQN1f z@GV}56gna%5&W=Bjpr^m10`}$M<8K$6|ZnC5aLTmJen1G2^VAzq^3>&Zb+exYUOM@ zwoV9o5p&yj50G=rUYg@qvCon)gd!VRzWQ^O5DNVCv6Y+85z79PUzrVfT<5D1-+~#* zXr2n7$Vgz-hz3cU)|d8=ny)!LEBIQ2C#WPTOLCCD@m8WCW|^73y2$$&V!^vj`njnp z5pgv}F!`&IIJ3|3+jW&W{2eTvDJ1`j%8ZsX^%jN`pJtAQB5CjKl+E@q#qp!eFAm!B zjHPgc;-q;_$}n7X)7!J&hP@-s z|J$%3=3U%)!9(y=GSJ%UdZNZj@p-m4zjZEq&djV#_0OPg<7cDIMl%_c1@ZorPZU4w ztqaT{wRokH&hKsRxsIuL_{H2ddo+6J(lgxfFo;&+n^A@q_Mxf#vR^W+4PMLtSx@Qn z>6=Uk>Fu#^0lHQ!oMAnV*6yipjCNDNmOs=K8;4qbQVvM30$Bxm3=gZcXJ?BY`VHbE71xWOLL8JYpQoV` z2p}Yw5+rKfLdvucKfw;=^E~pVG6O-P(3C2^_2!u3cOy$rGRAZZ(ji)~O%*WDu zRd#u*J6T!SZoDnmJf%&3^{DuzF%855Dz`}jk#>}X$emu-;OKu!13#|!ri8z}Uu>C1 
zb@S}W!gQqz*q}qa_Ac&($7u16>`y-b(IZ`$3Ogvtpgey3ec?Up7A|3sYdzB+E60sK+`manLZ}p&T>wUVz%iajmq(Bmyjzr4udHi<`favm zF4Qk?4b4<}Cqnnqrkx#edf`rbB6p>`8VjZPuVexYa>TV=^AZiZ%*jWqHfkTPLNXhz zC6$e!nTy?cFX*`lVw4mnX6Y#s9?U;4Z3y=1N(Qc#qQs$z1^Tx%;zkBpNnsvrB_@=| ze0X;A4J}$=x*CJHBP8w52TwU!>0#xxu+u*>S<cKP6y zqVeU*iBn59Os=6tyrOphx}EZo*Cr?=@n@X|m|8^%bgbs&ghT6xO)SS3-7=hBOxTx1h=FZFL#fXjKUu5JE89T59Wz z5dXj36nG#t32j;z$iF}5`Ty^qJ4E2m0Cb@dN$(Qq(P)0*p=S=c(lNqci+ttm)9NS0C?!^0IE8 zh~n5Ez&yF^7wE8cRe7wgH^_{*&6Cp04M68}OiI7+ZNCk@Ob64w{~0;mEHEjKBhLc8 z%(e8iL~l2q{^hYdD_lVEp!@3YG)ju+8FHsNE0m3!Gmy@03UF~|&X*~GdWU{X3VjX| zaMW-V6ZWrHbDA0GR*=M%3IKs2(2wF1P$$)jo^Dmv%E(duH|Do^!Bq6$)RJF8+Jme& z_D)SU4;BJK#s~pzp$4ozWR}A{;oDNoQw}g$D&6UhQ8OU@DHU|kh#2FtzyAUx^r+2( zMjpebKnxYtA4vn|5%_}M0h53b9huN1ld#!h;k!}FG$cB#C6N#=X*L0dT2t?#Vtom? zW5>XHGGFsc?D()m^GWYmp4}UQXJ_paO@o{Ji$DHNlhAr_vV{7!HBwWD?TIkDo)3rH zR~%_3(LaAPF|V@(c0az%V^HdOQHg(s09N`MYqmRLZAEnVR031=BoUdHrf(<+V&T_1;3D8eft&J>rr5E9uor%H)`FguWl-PgrxJT?T4e5<7nF03 zgXB()rMC#fwsSxG}~Hywct9GmF*N4%w? ze?NiPEKyC-!3XQg>{y+t6N;bs0sCpQxq#mg$H|-Ci-THVUx&{o+WL(ZE-0*P>ecc5 z^0u=#;4g+Ai}a3gU+)nEMW^vFF0c&KsqJ5(4KGr)&pB$`@oOwK@CKmy@a6gJ<+9Hj zm|(C}f9!hN%Cf&>tpNH9vb>e7vrs2@J{t@v_=K(%wbds~3p3UiJ%X4^j7Bzu#p6xa z1%R&v<}Lf#9Pv6x-E!L2jn|fgV?pFnN(NAiJFQIxAi3w_-EKMBHwNU<*yTv7O^LBCSx7SBKK#0~j zg%JDsn(8oU6VZVIEFkxMvqgi%dj=*^9Dvp+Dci>6iUCR9DW^A`ZBwk1+!x++i}xZE ziSiaV?mMzS7ew7B zk3(9KUG&_P4gV%9{_Lj8EL}YqV7*RAlgk$tI5jt6oyR2+x_XTer#F@Y(RGf|b!(r) zhrk2GM`A#j8E6hE4brcMfnIRjbJmg2mp%we)?me8@=DPNkdVVbfV*#E)1J7KIQUkG zVi{0{qBu1#69cyoh?mO;Zh_kYI6OG-9~{ZC`t_xWK<%G5>z~~E)Vb5$pU43_!=bwL zR=Kj&QjGEt4%G*$=|zgnL#`^abs*bLsE!c_v55^KgloS+{8#1I{(5!2j1-l*`lUFH zy%WGN`aanK-Hbg=%=U13&z0E$0MpB-6#vZu-r4hYAV6j_L;_-sXVsZ&6oBX=>7JAJ zOzr+aB+DIh287fF{58LC;tzlZ?3uWD=&C^#mOS3FZ#S>I^$5{?D$tT!eYXU8ht8>O^)Q?4tcx~SWnnk&M=S$i}<=&L%!@xJ1|^OX`bw=%qD z;G_ipJ3h1BlLHp1_W9~S|NH~i-Q!3-_Z%A>NPY#RQL^OYh~~B?cfjheo{^)$=eQou z@v~8hCFmaZn)6z&fW2yJGgv${3B9)13h@O~cWS$x)178{9Nl?R+myQ%OQiL0?FVi; 
z=Nvuyw>IqlXKjSzxi+3(4vA>XC%QBo6`3qq8<^zGeqck5D?K9Ov#Pq+2p)$i4skW; z4dAGH(U5n+FUGjw9t$a(q{qQqDGV?$rCzoHy=_S^5X&;56iBTuK~`la8gKy;)`ib) z(>>bhDEWka1CHuvm%Z=L&SvcbUrG0rdJp(2OxUgdq@5u%fYk;k;i(CB{rKk?JL}G2IS$!=Q!M%TN`z zM+#s=5wRgD;U&IKNrh09f->HSf^?CULtMdD*>|oc|Mge&Ar`EG_)|mE46fj(0jDTp zxfnC}a^hLNP~cjIsNhajfjC~A%tsjq?9R<_zBu6Y=&zc4OP-I5Cj8zDwt`bu|Ho5t z7&;EJw?OH>Uj;(?^2ElTaVl5!!y87m<%yPnWYufz+ENNj@nZ}F_7vnjE;ZeFkRX>e zbD#w%9n!A$@G>VX-ZO)M6Q2FsX-;ustGb?SJYZITd(vqDS1~xaLk(lw-%fw6ax#Asq)8|!r+|=v>4@_TB;D^@z zKrCC@N$lDPC9o(<6{I?Rp)jQnhkne$?-Be10?F#P0^?yYlj!82TJl@^G;{cxGPnI< zE-1$P1dmbFzjxyT+0E$E`b2u5+^@ca_|$zFm3G*5v{26u=;uX|0Uq0}Ec}v1)|bL9 zgLQ^y0HRPlA8A2jf!Eo5zU?|uNsOn_h)8&|$M+};4Ji07Gq&+Wf%A}ce{eqOQn*9y z{0GLSlPmoWr*n2QRdg1!4|wGZhA5w0ph3`w!yFvg-GTLIJgH5lXS1ySo{wc>Vw6li z`^hiWp~`-_G!WLfO5uyU*02&ef5Hutxx3`rPNvg#Jb||%n^>7Ik?S~nKs59HrULQL zhJzVVh(|u=AQz75+b!!shA*g9w^s$)l1YZ2l25_c;Z-RAr3RMIV#V2o$QPlPDH@X< zP|Q6yEFNjS_%%WMyDNmszRBHl4kP4!OG zVp3qEg^BYB-~$Y2M@*q|YQRM08g`CKA@zU~v+_uOIKy+k? zLL7Q0Nk-l}d5kesL3ch9hC@73IM@6K%)g+B{SWkmIEI{Sr<1ygM}UcB{`IQ=U$@Ne zO#B6vxg0v@O+-IOu(w#!Hja(4=JOl$yCLd{V>~kSf>_;S3!v}S@N@p15Kx$^LBB*u zQ=s!D_-E8d)WjL=-I`V%7tLt2nEe)zQU_fXBt74}BlMQ(=Ssd%%;vSWC&$J@{!`mk z6mTRrVYLeDL^66Q`56M}xiko^qy}`Wobx1n>-0(Obs@0K9N0HLbp(IVFxY+islEP1o_Ni4E$KQaMiFhK8 zZw2Ca=d*}dK6qVwc50GILr;DWEdB#hk3r%dk!91ItI;CgmSgGZd-1`(e6$P?oupwL zc=)dBiiuw~guOR7k)AqJ?%}D!L@>iRG!u&FY%`#6DfUq>J#FHzEK|q9e_PMY!KeGh zG~l2srCYqyA$R^GXWg}5e)uTjW|&UaXOxp$Uz$y(iiQE0Aq0CsqyenGLXeD#JUPmp|gKCNr=cbk>ew_GI6%qSpJe^n&TK9&W zpRjlIfwZ70WWZNuMm?jY?hNu7n-rswoxAclN#Bedy{l5hpq)Kn8bhG7u~(9$`m8|G zQ;CafoA4th)_{3UPqsQ%cyDcxY70*s(a}~EjWF59v!nD)cuAGH&x!<{{>J7@auiqA0k+r-DH?{Xz(tpU({#gO)j=Zdvd#~1-?ah{u^BMPBELvK<> z{C6509#%Qh2i1kwsp__JpcEK^$u$NF7b32pn%ehaV8YeT+#>%$lDSv}g>jwyS)h+t z|5Dp=lTGG^LN>BftcNn5At*BY&+wDuGJf~URMf|Pn~#>ETlN(Tw$n2%gd3@F_2oN1 zu3Cs{bal-kL!}+0*>0O@-zaL*V?g75$*tQ*MClA=l&vn4tuP-26?Y@!YYK~PrQu)V zC8Jgf^lRVQO1aWN_lpatq$Qf_rU{x(Gg;H_eLI)^cIag~=`z~6*D-~}Ho1uVEXks9 
z&_ns$>+d=ij5uP~hLco7)UfYUu2y{B96#r5Pxzp{Kj#6AO+EZGZS<5{>8e_B^7b{@ z{1>*-Sc8s2?SFKz-yCn&IK^IS^{wMgqGM7q2Ky`h2N&PNCOf$h$m@vQnmT0}e&B_yOwbaoM>|QcO~+Z5%1qK-;YO0wQ0$ znOeEkcqiq91=no5v$6No`gXMgn-6!f+0mg<+jav$X~tgY&gf*Ff@e8UvU0=nVJz2= z>PDscpS{o}3%9^{_L&KMpey(fRm%l9$R-s!u+~o?<<*tA@xPpI+Ho5QZS{NKwo6U2 z$X!_i%&g5D{Z_7~Rl3OQTFz$m$`>lHEq-ym(98#ubOAwffDZt~ zPcxXM&E&Q-jK_CN`W(4Tc^QnEo>x4JrYmr6xk>QnriOt=4I<8tv3TD|X#A}yeIVX@ zdiOmSu>zl(37hT{##dslA3FxTKpkUa1`>Ol_9{?g%#J0ohSfFC5=!BG7otEKup(=u z+SZf?@PLKa_wSNRu0)OxpWe=V@h77_=Si^Q7k7YHkg_LJ{)4lsa8GiHsvR5L33>cT z!Sz&s#`s-G2U5?@F}OpkUtk&HP{XmuQrIiK)9?@bSe>BxU_YzJyUff=2*-w&zH*x1 z169pRVpcK`)VPDx+tvLRt6kQ&GWcZ(E!tk3CZ#}=53ua;EA$jbvcFdiXp^W(7A)N< z(GqdPc6d#;ncgz8u*;p{@P4^;vw-tidr(I{a_ia87M)cJ&pn}k_VSj*etGfKcW%+T z>ZJeG0iGvYsdfqXop;Nx)tgjzk>*&xncuMX^J!f1X_rLF@{VfXA**~3EaH4(#s z4CEn4_4OEcAe4GZ%_+#*X)g+y-Ih3)nfBdYULP|zNi>6>u+xaH_Eh$!7(6?mv5!0u zS1(2m*Ke1rykTcrt7|-qP96$0|IL^FdO~wOg_}*oe0p1ComgT_sz+m6{pJk7_zav*Ev>tDwY^f;Zwbq`nv zb%NVJWI<8$?tC^FJL^yx%gk1J{>hu>W=g9FltUfRDT}a*=W``JyIJ~e;Y0dU+uzrv z;{tag$1mgW-QS*CZxxBsaBZCM-fgfiNRxB^-j-p%nSS(${bQ4V^SI?Ws;wNQBq?F) zG4I~PB>z}j_Cl?08Ou9Nx@sy(;VA4I&We)oox1mk?H}M<4&XxMB?8IZH9_(kaFcf7 z=KvdCb|Um2J(E7zBON>ghgreCAkbQ?tp>Pt3xJlK6Mq&US?SHp(w5%w%}ALk%qUG^KO#G5a7ENggGmlz6&%N78vR-ys=i?^LLCi- z_vh_vOG=p@p7+&uV0)r`@=Xt=WwKnaNM9iR;U3ss!h!-O+YEl^%1wU&A({DJ(GbDj zyN;5;PRcm2{)buPzNqr3@wI8*>`z=rHpQM}V%{R?VzV4Q0rv7b5EHO}CNoo|emmtB z2>Mo*#0;6pAc)(?j2ufT;J`EOSdqyskRD>n9Jm9r-aT-@)1Gg`LMJb(7T^HOCvTPR z12g^vYx|u5@NoEFaiPvvGZ5jMZLS^n@V)4$oa?Sm7kBLH1!!B ziGO={HFJL~_C8_GuFy|`p8$7m)9bL^rrjZ^+&Vx~Gy!Dx_+rY-V4&)A!9!!_!07S6 zIP`XArG@O%6A$(#9Pl}j!?U|m3OX^^k3F~T9XE`MTQV=3{c_fJavDqGyRvm1j$#;^ zHTU&z$iKShSY~TAq_r1G@G-UJm84 zzvl#@ea-h3&H62$XQhuD37b3>OD5!e@&xstEzvpsK_aD+cm4=GPFYE}U(iaGMt-rs zN_WM8UaQE)!k93;o}~GcQ^k=1!?yFAQX#AMTzjAjZ{T~x>liftk+t@$FmI_4^w@*^7-C7c;yvjCT}zbokDO_}i}rI~>>Tc3 zNOxcI8oH{}7E9zCEdvX{E+jdQmbE{3F2l(cwgITO4w@8eZEh~0(93k} 
z+to8HnQw2aP&o0xkw&lV>q-C7qXsrq|IK?pE`Aqz&c1*AD+-!PdG5H!tb;E(Nz8cA7Og+?_j0oUYv-Wo^1UFzTG5LwVvptw4?d44dIoi)RLaXie-KqAG4Aj*1Pf!4t1;K8+c1M-%x0P(qqKRb)-4Fl;(zJjyl`VRD)3GDpxfzBkAF$4)-3zhJe5ATn zX*CnJGmAXub*{bGTi=_y5PtGSf^Xw0YCPQLg~b^cngRWk6-DSZ#EHj$XI-|vT}b{>B_RyN;XT`tzh+NlWdzkX%T?R_}m)xB6S)tWFVL^&#U10qy_Oc~8dm>-kOxOdd)8MwW6}jQZB6%RQ zL#UrA{l$kdAC<|B?1_d+xu-X@wiN@t+M_hnO6qhbVVmj0!`d>bx6`)#){AGQI7L|$ z{+&jrmi7VuEa?XzF-;J2{H9xk|J=zn3;4eIU8z{^{Nq>$OB4d|%>x`x{yWrSE?KLs zSC82r2siOBRLoxGUPkx1gt;3WtP>n}!Ow-lqOZD)YWmDZV$OH4mGXyN$XNOz_#jh9 zUc3()jRQmNaV0!&!{i3Yp*$T}92T$N9^WWGZpx&C9d4* zbc^M?E3kIkgUQMUm(XB$z^w%a!wZh<6&gYHpKx@kXO^Rr~C3)!Q0UReIlV}6%L>hn&FpO^1 zlsG3ICLF(z>lHfkZDhFLM?n~xjjKgOQzdH%pp3?$gGEmx|ZKe_}BZF({%tei*n4SU10Mz62d$@qe& zsyGKcn%ue-nB1EF)Y>2$g_Ke1vXeWrhrD=~XtL1fmMcnb2%&9kj9yIbZkk;sUQ{vO zPN0&V|0B0b!xW6yhNGCojc}=_HtKvvd+4s?&llc7gI_a!w*)%rSwwp7Yhah!jSGtMGi(Gu2st2` zP~)H1o&T3fyR-E&?7F8KNMNxL{ycG)R8kO6a@Kb{=w3+{Fb&1BQV0x~pKln#>nY_`GJVn*l?>=MHPV^x;Z8JIAgWAPtvHQK z_D(ny+>jf&hWs9jC8U35@%A&5(|6F*h{n~Y;C(F54vrP^xwew-HIx55f=T4IYfCG{ zIgzmfuo^~(j~_6bgHhW(SmQ@`<}uey^xzv#KW${yt9j1jvpae)sIp@*Ez^>L6^HqN ze%|qleKzZ$*XFMseTyUDoO<~y%_>TAh+A&={ZqhF>L4ruN2`%q4E%Aw$P+#S(`CDh-C+?=_722|0f)kZ`W2RB%{!Cc@G}XHz zv7kZBOl`gMR~sXE@4=8cyT2e{B7S2c_3kl8e2$oO1ay|2w?J6Ge~o_y8V3vFbOBC( zH04f~)K>yWT;3tY)Da70k6fv_U zqAtwJ(cJ~h>Z`qH>4x#76*z=#YsN}Rtpc}QU(U|Pz}@fWIId~S(%gOUVaIhBWFELv z@i3QlY0!};ptz*{l#QqCYdiDAx0e(%fG}agQUR;Yq;GH#sq$&J`Nk0?-Y(Dr*hm5E zfryL`k)SwdBu;2`?2REj{$iQ_#u?M9A6QO9ZCdO*wLVN_DU&RB^u0f__E}0`JSZ$h zs1M(NxO!-h{vxA687hvKN%wU#su+*Gkf!W;jpCj!G*an-Y%yhO(rQ>k2{SWWlc&*} zxhQ+$#6>46j#GnK>@?v*{nC zNco<0n`;$(dh=-SiA@4|ppa_o?fgE_ht97;HC|T+#DZV|ReZm#K$dj$64-zqf%E7dlrno?oxsw%6{ z%!Q@O2unY{ex%>q!!)M~^)ZFa!m+?W8;F9`8M~}(pSELRG($f zv9MkmBjAS-L2YIl{20w_#^ed);rNpv)<#&IMuj@^P{L5{)WP=X+PB(o53EdQeD`}_ zqY6s@NEk5lrLy{>TfRphXIh>B8|>1l`tMpn7e8|*OnBXCSeeJszM78+A^LzF-B~n9 zL%fw+0rxI0?`^*rWiRK}#%G&(__urS59YgVeG5~D{Hza~cLw=aOv6$A)nos|%FIzV 
zLqfS&LDW}@{Jje4eh(RQhUYJ1U&jjnH9c|z8Z_^^DtzBZ6j=XPe>>MJ_M4{+V5)1XNg_k~3ypa`@~px6zet~LZAWQF!EHyefE4-MUG z_zVxufz+j&sCxf=`{7nd=PP;_t6cgXGG{`;h!@#;kZX_e5TLT;VwwLr+`9krrM{X| zU~PqEBg1P>^qm6t5nWa59Cp}1)2#q}62uS2+S(i6?j7K9(T`b*qO0(v%$SJ8AvO;X zd~+}UhAp=+k>JY0ZTM)6X6P}_JO&MvxOU&jBP!6`OEYzM4l0ofF()TLFz_}CD=2gU z5lGr;(of&2UcldHQmlEvllDmV37M6$Hj>1Ndt^r$f)7X7vVw_eP-_JgdH*~`{FfkS ztY;gS3AU9PnWe z_LE2C{8c&^l0f9+c8OVpog)f*#V~iyNp%e34+lac9&D5Gm@1n)mXK>d+(p|<_=Vf* zFi}LT1(ZGH98VHZrbh#B^ZtSSw2y77e|yTX$-dV8hgR=s-G0z>B9IS<50~bxKJ9%H8VZRQ ze0n}REW^`YqkHLj%{3|ciIrM`W8givRK{opf*!$@UP~TJ`v$qPWv=ci!niM;8g!z& zRmbzaP^*Y1s2^(9ANS&*k7k7C#SZJBpUm>jG9sK+Jg%dmmcH}G@#h&aqAMc9JRArQFTY~6kW^48Z? zh2I;mBfMr*_8)B>>3Nl*Z)9+>87_Zj+9~8}kMuSE1wro$2hxSgDHA+EKRbBJ<`)sW zg!RyfHO@%w>)j~UlC36o{?NAvQJ$nPp@n2edLr@6tNps*L~I*a0)ZUpdxzuPrWKgR zcMYsid46)8I8~kC3?LtoOO16HXa=5{-LeG%Hp*6!Nm2G!*^Fu#R1TL8_{V?<5B3*x zSy%T>+Z#`A`&Z@0%xiO~oZzOLpYD*El000JWruw}EAq%~_ebr13FEyEQ0dB}#e%9m z+wKGK=h3=1enh0-m96i{76Wxu|J(;3<)@%0UL2G^_HIULh9Xgd4E4hx#^54pty>|h zPo9%L{qf$m)V7{omk}~_AWQS`#YzaJPJPi;Gz7@1_?7d4R{NHR$r}%9?u*kh4uZnF+RiK|LB`tS{qAkw@Ys$@o28-f zhlT2uKYlwOJ#2eN7ixp4fGIJyQfSAV0O5{}D<0=WcXY>FuM#p(6&d(Pz}gBuUWNuE zLfJFv=I|w2>6!XhOaxKnXsE7=1W8=Lt_>Za>Pd3O!_!*GulW((-9$v+28~X=r~-J) z4OToL_h$Hr@PeSMLaa=Ed1j!H@i}~iHya20+<;*kT6y!WrtWE=_HP`CI7w1Ya1He; z(A`?7VvBKRw9XA|ft_1)%Lh>TNuoakc%stDv3MnR8>)U0YL3krU3w^JPx4g<>!BkI zA^NwTU!YOtA*32%lwK{}*c*M?%Fq-u|Gnob!2_%cv}s5#a&_0lAP|Xp5A10&pou5? 
znP4z0h6-X($Ptum9`nKL6&e>9sFbowRuy|7lIHqUK@Qwe!n1=Vp6=Lqp8 zg1TgH%)wFpf=CBu0fzs`Z1J4$hgWVMr0FljTx4xu5@zi1_lQ0P`|Hn6@AD=cE^$#X zqizjNb|{jXew(e&E~Z9U*U-&Otb9OxV~XofYQbzHSStihPzK+tlZ~3|rDmQ0$ z!S;^djYg5jRX?YNOkpHE*70SM*UtvYRi6{Ga8a*V)jVL7(LYYpHHtwMKh>mg+=e8f zAPTfMjW&LY;48t<3E7XwY|)Mk%lXZVWFSz@puux=A7&74iAS~s{nEl|g@+z&?~08a zcP|_Gf#*9hXXH@CTEUZ``_3LJ=lAIM4eC3L%MS_ZJm3bDTsn3?rc7u#N27?RFNjmX zc7O)k0T=5CN@-VXV^wav8P<)m24Anm^(V60->ghCA-Iiys=4O~Ltf(U>qB z6nDcohvId#z4k1r>Zn%>clx6@=DfUXL}eUPS(M7^-phFI@r`bOa&D40WKtM>pkge#gK*8QaT?*JU2DYE({>$#eAzZ0VdT@5cBGuMw~@Wq zl##F5bc?(3Z<&$g#Ir3#_+>{#yg=JFS?qeIMB8emqeVH>S5!zMX)o>smkd_^jt`?Q zgwoDL2@8|Qz%&zb9Ln*YJ-3lazNpliS~X80jYI{?yG_3HX0 zqs2gTHTjx23Fi+zdD~ZY>{ZEjfhu!))Hez|j@*h@+g_alyL>&$@J;PU4H0THtIRuW zsBOdR-o_Oe)l2$7l+WLZ75@X7{ZH3B!6@CTbs6EOUBfxGoY&f~*!Bps`lJPu2-9ki zJm)?^sNL|O=-hZKD7VUFX=fm-&}s|Q!7k>lFjKg|g~k}$BW7(ntHV1gS@GLH*%FEe zkQW!Ftv<_&YMJIE^?7M0q0!~gr%KxC{>f8BK6qSD?aR&x;Q)lc=x;;2BkcX#plfEv zO|GkdGA8TGw6>Ru&Q?VB9^<%Y{aMe+px3v|oe2$Kg|N8ji9roXHv_HSYd z3WtfAYkv;NVNE<9_G)2`7QY(IJ2kj|=|Fg9xE<*%inDll6dl?xBl{|c3lbW?y0Tyf zY&Fas4Ki(x+_?KcWcxdTYK?y+*_mn5d6VT5bJ>0!JKo$V!6A9sL$jK>=T#5?`MOOR zRN(W2TDo02792Qnxt?=I?837Be!?!*V&{Ti_){cg=c-?iF^;45rfkBCa3>#p7MW-p z%}_F(XhYnL(S4hp3+h4?{;ZC}B~LdB&?ORrQXTqAf%D+!=4qwY2{XQuZlh9~D_5Ue zmtRLdEPzKREW&SvNZ0JWjYz;!b$Fwbp34+$$X<^+cqdGq0R{@h9*m!=^aFvting(; zrgESs?%oWG6864W9_EX1YrK0={6mn)haO4t+cD3;mmQ}vHU*CYRK_zMVM^x!tXJZF zzy$mvIDAfEvh9;xxSxr~sD5>^mr>P^y8-kaYrUTu&1mmvk^W=begWQ^0VS~{y-vbo z0huMS`%$RGGtmDJ)z+D!`Z)Y@FsvdVhHI+y<^(W!`0*%xV*Zcf(&a!1AY6aDk6D@w zK$SsLIMJ=A~y68{HK>R9*!@D=ZPA!DrxJl*v8Z@PT{)n(BMV*%=(p9^<$ zR04(KwK8Wux?}y3t`PHP4Q8f)g!2`$5kTfLpcuSVj=&q(Y-%qPyfXEL;<4t!UFC;? 
zUw3jbOEdwRK(iju%m7SDvs{rB|4332a6O}=KtV=hU{G+!iR4#P4^|?+1^DC`eHwlQ zJpxzhzpIJCaX(X!ejUsG&wak4C83@@t_y3qfB*bduDKY()xzlh-`T)Hc$I11pJtJM z(2vb=h7endouA*7xWk$A5A0S7#DTTsbK%mtIR4(l+{zHpQH<%TQ~LkpzmYW=vWjt4{6dG`Cz450q&yI*b7x-4a9o>vHr(x?D;qq_j;tn%*U*kIN|s(V1_c~ z_EY+EwSi%xRD)GVYOUYqVRtW4Yn8N2>|6eV@M`4iY5=pYY^}-m;DBVB0pFDumqkRYPX+T)5 zJ$tqyVVwNA^Qy%z%j~fVvOk*UKXt%gFabwv=iIdAGiLlG@(j&SR$sX;@LXQeode0; znr^+d3aK`e>!E7zz3-{?%(Kw<(uS}?aLXn9kJy}utt(*Q$|x6QaGG?G799uFt`(TH z*|Xcu;O(rRtY&6rRp~tzdWZ#HfopD9qgtzf))-FQ9KAYOu3M^;6^;99=%ViX{lIa@ zb$9BCd;xxJg}pdxmRBTgI$IW4mGpB3c~{;*J2Mm^()STp#8&l5ZH^>)0;=*6E>&wY z^~cr!UOkSuQ2{aOiAsU~I)G2Feisn1J&jpiZO>;aNa5iZKD||{{f;v<4>yt0N_kPX zn<3Uo&+|RSv}XOlY|`Q_DOUpgV*L;h#3%F|IMUbIAN1j<0{feeb!%u0 z@%4^(dcO;D)3bB-)7@~|c#z}rT_V|O^ApI_mn^O5y6ZReeIkU9c_}-~wI+T;bmKvg zR#a(HSm0%``T5;m5UjI!V0;wi6C$^Ldn%Lh+2HDz^*~}aHIZIXy;S&0%2cmI!y+z3 z0XRKq@~HjRlpCd*)8oB|Ri`>@)2;XhF915l&Hrug?KbPos1@FE-Cft0Y-AM1;LUlv zNM#)jxa~xE4TRZd-bcCqK0IC_#I1rtQ)tuwZOa&)_1-LX>X8TI-udBEw9KB`OjMxd zB;Q1rC5UryGEKW=Ytb>WiWK{YIEw;Ywjj-=?vC;-NFg5bWQ!zb17K8VKak;1zh(pB z+?qJcLD?eCtf6?E+#N9BhKk&kWF5781_$?0??Os|^%)AZ0 zQTD*)q=}@SOC}>J~P?ov^#i`f3Wlg}Z zZ6*QR{B^HCuh&ud)Fq8|RQY;BFLJ>Y6$NCZvpvw_I~p0eUHqSXBZ|{V@kt(Bs1DrC zY(|jiF9^L$y#Vp{1-Qhgb?~CQufPBfzKnj7o$sP!CXqh78t1k}SxE7J_4b`nO*QYn zDG+*5kxsyZh#*Lp9#KK1D!m01k={WGHK-se0)iCjg7gx46G9XP0V&c2Y0`V7OY_X; zea|`fuJylPuV2g}JG1vZd-m*^=T{ym$J+7oN^?)eM- zdY^9?1i=HVkRaM*?H7`>Rxl0&qQWai%^i4iVgEjO!n5uua-OP-E36^UcI-)Hw(=o^U$Y;Zgd zHspmqK_*S}Ah)J|r1OE;E>9_uR--!8d5F}HC?5{LF5WlK4V%Z-DF1M|)VU4W=NGyY zJ2nF|P?=bifC-OjwqZpB%lPC3`%SHq?zBAGTSb-Gpd;JJUEU=jQ z6CiGsmMj8@Q1_WXYkY#sDR<| zS2&l0e6T;O+Mc_bV=-)WhAj@y}B7cp|Jify3qjKs$ctfZ@^d)rw}@$ zofe}NGwoopXCl4EQSO|M@LOmt#@c1ZlpeDWKbzbaam5dC@a~WMgYx#x<|(I`ekr73 zVs&&=&djch#7<3o$rpUn)lL_}-rhR@jBTd0O(Zexur4=zlgBB}BH&L_AkC@!>;vWl zHh5ZF#0bG}T8KkqK@#!Sl5(mYAQd&sW}Q!tnc1`b=`!+N2A)S8w0Wd#hH2#iGmq(K zJZZ2O;n_T|@E-v{7XRumln5 zO&c2c8>E(W9n&ly0R-V_E44CQ`*f$~n+VFfEOm{z}WGm>GdBKivBu24h(T%n?- 
zw1y3nrZj_|@RA;!Ko=4%8ANbsj_Qeb`U6m*17lsweekXqccAZ`w|0eH6Uv|M0jP|* zsQ}E_pAm0$s(;9WYlDB|!*32i5NU~44hMYS2r%aUIxN03zz?fk7ijH`|ufi1*~HAE)lY#>(j{-+jD`oI(}U2PX2b8hKd(>HcN~ z|89ViX@(s8m~J?DWu++iDTt}A+W;=KW2nTvSHgI)zybcIUvMh!uGiwuVSx6>@4Lp{ z2#bDA6UFWwrh;ZrO?^$BDi~3EIppqFZ!3j$@tPJ-Pt@rQJsy%4&4m58GCB88Jrf~_ zd#`Ay#XWJ37`7*-vS^@mpnrr9`Ryp##4Qps1DIjSM6g7iaUTW7l(+x`n)S&8s~lGE z`<`5XcD|}9-J$ba_&kh=XaBq~;9Vw>(AR@R(8HB^=*jT2I{ik0A&Olhaxj+SJ1W$D z03-G6e^7)Z!DSj7w9b$wFLyC;+8U`gD1<3E!rLH_sC?(wOPemTE~S!_1Mw$VKPOXB zE5uLRono^+=~2Em2D(AI((g)RraYcZJ94ZV zR=&GY>CRBe(u_3yIlV`fi8I_GBk@35yNNyID!>dSbDPx$LEBA76bAwc@@-Zz_ZJ zQsTD#<>rA*D4W^&_3GC=ve212M9k_NLwCGDgvw3T0N710B9E2s99zcVD_TS>z^HzH z=L+h4L~o6zAJ~dKPg3JDVIy$E)@AT8#n$P&2-^h(zi?}2p$Jzw8Lnc_^%y-$;sUo4 zny=|0JR>iwPPEUeqKvum0(+nNz}2E}6UbJD#|n?UIN?e<^Kwu!dPN)U$`8DFm>=6< z7C+J)=98Lz^Ot#=UuKC61wGS_x8@=D*>%zH4D}t2q_+eyJbXczR&8P#;@L-NvJir=|#$BqIpFz&Oc8@ zGPRhxOBvp3itqH#a#A>9%M@GB{uD!}T!^rkXDf2@vJmCL#kq6hs{!*>07g8nZyBB( z_im48YRxZEAV2Vpg7>kQM!IN`HOGQzwjwrzNe|4Jdm%6i5KTD}%Pa#$VJ_BXiu^iW&vZB_kp4nW6YNM% zf@K~J;ockvzl#QEOc4KudUwfFQc!vAXEIA^Uv;acw!Vjd6EXaLIWKi}3gbt0c)-&f zj_KKtR&&7P$(DieL~jPp%EZg8BZImy$s8(<=32k+9ZQiOwp%@cvxOxeDk&yn!a)ZHEZG=Fv^2w>bife=>{F?_5)RcCqFcA2%Qo6)RrW`i@Mm!ENTp16oQJ( z-nV}GsiwR*W!k#xaYh1@#%S4_Uvd~Wl>+b$uK`NQS0sU?GOaU_x_G9+E;C~c_hm6T zN=e)H#msj9fofYi|BvC-p1ZY5%jN#S-0Yc8CDKpvzZY5SzyNJ4Bb!RnUxB5fMqDWS zpLP8$Mx-?D+z?g|#Q`|Z(*_I`sGH$3mLy#*f$i8@xFpO4 zn|r(S5T+!m*R?vG>+-m}Hc|_H3khe9N>61of-7ymWsCY-|E8j$OGETp?Yxzj*DfFk zOMLXe1d1Vs?RIea&7R&nCW0(u&kz~9LEq{9mVLJJupb_F2o?{_*D0+stx8!Mr1uRb z&*YUg)9(OuzoRCDbLqh|JX%+9Fw5~|C95ejbBS1eV}0VF=%|(B;UF&COYeU1Ua=vmv+h&?z(^Esz z6TfVKN#Fm6Y3pg(aA2ddtlWuGe6M?3%9$oQPpDRlE}cEctf@Up%RG6kw}_rqxg?Nx zvLia>UPX`b zEt-Ab8U=W9a^Y9ZSPKMAkeV6Vbs}EHljocZl#vZF&j>5Eefo0n$A5VYvyYugtwMP| z;r0K596)Ls+u6c-t=APVb!YX(i1r=Rv(x5lA^~G1!F6ujm3HK0d+#WkXr0;pYr4Q^ z=iimM-c?aW*vA)a_}B6S@HPNsmfpVv-w`Ye5cxJhqK(L6({#bFLbAfz8?E`=eZkfk z%}Ij`yX>N=&uZk@zfS1uqKoqa=XS@0B)yVIFmO5&I=qn`7B9xD4Z9_gzs+nO|xp6Dap-(smU=>hw?@KVK>o_5m^ 
zfZmY5hK}WZKqU_%l;4*KU8MP$klZsIsfD+J_4b~9UN~(|k)Ux37Bs5;szturx30GO zCJmF6H`Cg+kz$V*<~HO$*`-MPd(rsDrn5fb$#d2Ot@Jk>HhSGRB$%S1cMAydjlorVw& z-0OHX6O>laVWsh;!9JOUW`;>AJ(|3w@|XQU;UXH{9>o>gMl8_^H)Q-%mEiqT>wt?e z*5j4mEZvhMrYz?(XHey(6A-C6Mm)>pR$CQ4BwH!tqfb~KU9+6gl-M;`FdL8=7W9}D z^E`3Mi_4yd9J7_gg*#IqzqYTOr_y1F-2b)M7apI)0*0G2@s zqN46SR>|O9%)-Ldx@ut?cQA3O#CU+QeMV;cdc)!6?M#_->Y}gyj4c!%ytbKw>~xj| zFPQ|bRyOQ6(0mf1``i~aDz!~aMh8qJmyh|Z!r$rz6PrA4RnO(oRc$*;;LopRd6QeE zya(wqKBC?hY}&E&)Q41tC|xeYGL{58v6D~2lbDuj5q_vkxSwWizHb%3p0H)s!xr5h z4O%+2`g7soJ(yMveE?m(FpVFFq;$OS@m9=5W~Ae28HxmMpjRS~ArQh=G%%1^67T~o#6C!bXY;H4Hu^WF03vR z%~E$k_4#R#OSlfl*l*|=z4)$&-fwbkEv3{DfX(nv@e6UAP;XgglgSJZ={|2>4SJVL zvZaJ9m0W@vR3o@`0IwENpE|NP%4*q-QrAw9HuEX{%?Ye7C+kR>u{?hVAJoH)-bOW! z{?c40PV^o3u@ga)kXEOaF)1g-7a&)3GMU($!MY6F$YG0XH(dRNXao8cCV zHzvk6!nQkl1)m($g<8Yo?*zQ|QjrMueNl;gwI)U_^i9wBM`U`&wRgrshyu70^Mc=W zRA3o^N}sQYeYs|v9lU!jQhz$Vr8=o6d;z53RFIBV9qVhp^}eLu`T zoCK()a}Fmi1kq>#*|$9!b5pO~3pec%*^?eqb=VaPURO$^IcJ!_ zh<}T|D|Q084TK;{Xc_eN=Q5opsMGys7P7aZ0&4>FZN3Y$UOE9cjOx0LOiHga8!LZV zs?W)FXM=wAbWQ~+RW81=XQ{fh2^Mh|u7NC82(u2>`IF!9sJa1nI(cAD&?!6O=`%vb zE%QxsP2G3+-P^HZv$cRT>SWoOVgq!Bz5nP8-$VXA-KLip*l0JjpdQ=*{^1p?#iZ8loHvi`}i)O`*A zl2BS|A-Zfm(|$cFNItT(h%GGolI5KA;7+S0b9tX+l0kEV_E$!#N1yKCxMR4IWUDS8 zPv9{u{IISWbbe8Pr$djfhrgATq2IDNvmUzg6>$JNAQY{9m+dJVSP7ags-02VL*{DN)CUXoLLQ0 z>;iN&55!`n(;UkVDsM!s)vFlqdV7%~H0Rh#%btK1iFa7?Prh|&jBzY) zq^78nk#Q3+6)i6^`PagUYzR<_ovORNz?(D#arM|vZ<+U>JTH-P8FrFQ32T?(<_aa* zD?{yJO194Jb%y#`eCl$HPc%r7=Sz=+boPxS>mW$21MOXQRRB8BD+t>OF)b~=VO&^R zW$lvGD3JQ#RI>i+sEOKNn>zK_$PO}jvywlg15D+G!>2(CTlL>YoO%d$FZS=gOhZ7^SM zV)G;9U9FCKh1|jPMA_O5QvmtN?>D7)W{;~2zgF7)9`P_|{4XPwSmnjJc4^9bqHUtU zkbIX3xBA5jy+j+&?P74MT|f{156)sh+7yJS^Xim5C{V(Hf$9+`$9{G>%v0in7{a0{fP#a)cB3Z4czRJXQD5&?O>2guoC zw)2RfWW_pd%vM^ofzjRI^1*vAGcS;3X90g*%b+$66ilszP>lBwTYlBWuu=m2a(3<9 zGDwu$jncNGA3+*{g!@r9yVu%8QxA8Vm1kb{E%#J`VxqJDn@hnqXC9^wqd#VmD{uJ% zH;r}Gm96GV*;@Y{M+)V3$(||k2p)N9mtG=Bj%Gn=jv$fgB!Hv1kD$bX#i4=x|7&0c 
z7^EeV3}2WSED8TjYi8pILql);rEz`=NfVI6mDnvU~eA99r%C_c6e!F8B1z%-8` zf^4@<+6jY&pAcZ!4zhK2L6odFfYXdx|(H zAOXZzg1jypF?zMLzQ>>th`}KGW-V|^T9;;lYHmqH{?5CpU{mL>;D%@7gJU4euJ#nk zTo~_pA*^C5T{eXq1X{^~4M8{LBBo$N-e2$3hEPw`O_D$=l)}3@2=O)<=>q}E^nknp zU1FRxNRCQ2(o0M^q<{(n9HrD#-b=9(qPZN0c?Hk z7IL$-dbH%7Oy#zluYe(}Pye4i19j3;PtGxo3*C)xR}tfSX+|piVOHNXscw`rw)`GU zIBB7V8aD*auL~2-jg}sHE{)9sQu+pjy9GKCR9u^V4ee3)pwVX@5Uo4T$Oom@;k%FhJlB}!{5fEThwq5-6{o0+i zshS5Tcf5C=dP68gCz^!F0*(KjxmlTEWf4vT`Y`&hv)+A4(pReOH9cL;m=U7t*^pLy zWr=Au;hcM!u)`#_8lsgO3jxGgmR+nw%Uq~XAbSOif8bTaZ z*(7&RA>f;FX<)LAQqp7|TH2LriQ59d3Q_;pe;b(CW%>~#HBP0b9xU0c|5PS7hqH#) zv`Y`a2igrG2Lx85sCUt5=B)B_Vr z;RYU_O7qU_t1_7Tx&fddcBRGh>*j!ka)d_w4(C5=F&)0?*TYp~=)lJDcj;~FE*h~| z&h>_v-yyeZ(zzi4l;p`d?QsXT{UJE9Nnc6+HUY-V^b>-zATsxoX4$P-fV{udVc^O~yq}goRz7))Toy{dHKNue}r4Qpqul6*v@t3Cnv$IJETZ>6cTwm*oO>dr}{S zMq_w)P9T7DmGWcBzoG!D#I5jz)&Q;fT9T}gom>O`q3qbc^1y3|dTt$IM#JF5wj*4A zXJ$O6wubn}dz`>QHvYcetbFJO8T)w{U9e#!>jhX(8VjGwLw^4VposjR`r}AnAi1%X z1A1taxc3vG0Ny*}zc)OF?qck5J2m*pH$4ZW&?G46BBbD~H zD&(~Z4HU-@=iZ8#>6T_9#>+0u#EV(2-4ihAFHpz7Fu&;8Z9voK2}Jd?7tweFm*bc> zRVQUNE9?aXBi4mi(zRyDvXA)K90aGqLp*ZNoRSD=|BLegK!31!M^Y96iR1niEko9T zjSy*PV)XItLvU&5X+61i7D&K=+($y2!rSx(490r;=Cvz&1k;N~c!Fbo*SwcEBW_~p zt&PbWSzLw~x|*sC0c+8zpOc9F%j^(ef<&zk z?%PvoM?E1H_F!6i7P9_Fg5ziB7_C62azjG)gPFdvsO&$^3XhV{%_e%-Fs9Tz&X9sm z@dunD)1r`BPxGEX7n`GCTjfyiZEiO!@l(C`dvE*$(yM}}Sk)?DE%|ED>s%5BIrddg z%)T!NM0!!_`^oXL%}?TO!nz+gZ7Oa+vYH_NG973<$bGf>3Q!gcsKe$w z`l~e)zCFSWd@rAnCgeD7I<{*t%2zgWR>Tid41$^>7DI}e({5q+iUYxmA&ckX;Hg#R zGJxEcD-dL;$}a>~2rG1NJ)Uw}A@tr%0WhXZO0`qp#S?_(#Gad_pk``esKfU&Eu~#$ z9dZsG14@|XbJLYeYrsiIepHZ(*Uxp$>K@J|YOj;jHt~#iG_hw`Zq_Am&$Kk+I|SwT z|31KVK{St`;qJlZtnQWeU2~Ylhg(f<&|KCF=tW!$7bO+iK)Hq55wKnUZ(l>-I=P*3 zLCd8iV3Mhbh6HQbK*vDvaysV|w z_HzPC0{~^3hhc0}VD;q=l>=F$Ro~(@2Ft!~fcZu*PA7p%^#V^AkSv2Ht*m}Sab2Yq zR6L$K*i6uAktfN`FQvgU0k`Gc{RaDr^a!|sPOU@0-TP7cji|VN$237<17n;*!LRrG zUYlNu(`9vpWibfct($U&6Cn~14mS=8?zaU}5Se*UN&8-rGNBh**=Q;J{!Ak& 
z!a$uzQrekfK!Gi=q?l1LkgfK|5{BTmkAV2-J>=3U%iD$+h*<^j2#?_*5cyoJN?en( zdCnjEm3*f)eILjneZn)9$MHz_^J2@OpkG132(-2gbTCz|7w-bH%B@>71w&8N8cNl) zAXCC?9X+2N+k_m>j7luu4|lkUl5*^@|5UM6lKN`j88~ z8gHC>ch*o>NG>23s2%P=R-yt)t|}|?_|5=Xlydp0_VQ?@6@_iYD}42$ec-70bYBPR zwg&CM?Ectb+`RDsA#b^#erKTrvU`-Mz94ZCa2V=-Hve=nKzJzDO2LUv4E;E~HPoqSJ$@Zg49TaWw<=4Dl->uL zyfaEn0kY`%4Bi9Vl>CXyn_aOhW!->TJ}d>ww^Z^TxLJb50KFTuIrK&{^!$Qmp=yvp z&B)F^<~O0Gz+^2uVDROQp+HJa3Vt;@b^ksnE+=NS0-PBT~v#1`1+ z!=zO$Zie}Iu)t9y4`KsH;ntPANyFEo1sE>{Az$`UTb%4h@qIk2-2t(k7>K{x#eT=$ zrg=(R7o)itWb{8$p~>j&r-%GvQ^D@L0NwSSbugjME+P`+@QicixbsgwzzNKv=YMKxan1 zIZ5%@%!%G$ITOj43f`k;yhx3MGdku=y`)*=wBzWs>e6B1>3s*e2C zM0C_i&wi-e0VD;IUbFMhEW z5GKkC28I^1LmTKcJgP~M0d}IX*G~k`pDUpwzdM)F15w*#<|t~z+(|!k+`KjgUBBvN z$34`+qC`7So=Li5W~d~06F4+FZyCktTu)fL(-*^(v`OJd$}?g0GBiwG|9pdwYN^YR z%a$wUBLuh(=vIq`mOi()I&X1uXF~$>MVIPAUq-@HtH-KX-h+++MNgz(-3ua;qxIt= zN+Hj`4#q3Gcs5!iD(LFSNAAvZ;}V_Pf!!8A4eKoV^!$UU@KonoTYKiH=Ew6{o6BCe;?G?4j5J0POypC!`6E*@%y*fq&%8NcRyU=!Q?pw;mpC> zdb@?UIT+kq9Vi`>Q`JQl1}2(|4*^)l(f2?@N}c`^>LTnHzkLk*?JutOUInWhttunl z%x$PxdhsU0F`u;burR5N1%<11R{F(O@xR^s+N1M3TrQ}VbkdT=3OHVbU8XCzL_tT6 z`D0&aw#K%lPpQPm_c@X+`1Or5sqg~1@eDc_giGioN7)y3OI?i+Id-I4m>!0bVqt;q z9{X~gO5I5nQI1^C9@J2|cP1>fA=vBjHBQ>#iX5ud2N_MDDAesKi0L$dlb3GR$)5rvI%x zgvHwvRC!O=wkZ7iWc^g==1*Jj>BeG3knGbYlsoj*c%4xzu3W@jQnugjnHv>1qDmJs zlm^re@N}a$knEXt3Pv+aph}RHg>&`@z=(%Rij*HKkOchVS0s_3#mqk0wCR-N&L)(^ zCh18x55i?t^ATk)QuA=6LxQpZNYJCX!)?Jl7c@YUl@4P=4{(hm!!e;H zt!GHZ6z^u%q&QpZo-Z_IG?gJPkYd;2l%a^bz|vU#B3Uq>EKNdk8onI>gvw&v`-3{;GjbZ+_pdw7U=BN8mJSbs7rcI-wnq-m~xbDkufS5 zAinyFl%5_BMh#K!pm9^L75K&j(WcW9Onvnxc#C0Mi2>vw%8@=~ZhPqVpm68A9OSa| ziiJ5XlSO{E|Ir?;@jA4t=)TjKp9Ccw^My#Wr3&`^KXbjr5_sX%dFyY6+UfYjKH-Ku zDbN%i!|XQWorqKPf*7l!C|R#Mi!g1E+^_|S&tVHPN2ZQ&Db=%2IAA|)x{@Tivbn)F z>%EFfJJYXeOFUhl)wMN7{B2ZvjZxZ*piT*RRFI`07ykIKuj5lO`R7QVu73I&*Wh|( zpdg>%gA};?RDqXurI>VGteB(t6Befx?7a4dyia~=1f#-;r6k&!wOaFGlD%2rdVP(} z)Gh|Zh5d6Rst0q8>{K>h(;kI)0+ySiKP~1S1XqfG&bPk%neO@fvM$TzQ=Soz%4q#Q 
zorUAz$41rCaylLMMq#Okx!N#eJa)zlcx8$^sQdSlf_Dw8&|PYCq+#!=*J?9+C|fT2 z^QV%_`~x6XEV zWx|5qtYWUKrHX2*9lPD5!*@tcBY5k2P?YU=-{-+4TODOw1VxDevs2Ff66^a9?vC*c z%PW?fPNntCot0+qWWB zC1EDDLx1ACPO5{6XJhMCesgU|VRx7QZ1nsYc>!6&$JQ#}ts+Xb$NpH#D;@7%tM#M) zrYsQps#(6g-9O-m{q%>0}eQ9|))m0vKBr|Uez8-q- z^s__Jb26&J)0s!ZB9*FGV_<{de@aodSLT$|Y8f}pAFDP{y3<+Sux9_9jC z*vOjQxO$h|c4qU{^|m(qCG0!<3ZT8}=`sL;i79NB)ksH`YD|gCk z26p9x3Mtk+nEvoyTWRB3J!G2XSoMny7h|ewsS%-h&Ogh#bL&m{DNGtAJctl{p+ZJA z$c6YohQV)MiTCkZ^`XM0({YFGJT)7FcSR%S*|uiN_mctCO-Vr<6>M`Tqlz)}?Ht|rtl-1oF{TKFh3Tfa34#I* zx>yiyN=@yzo|jKaynFsnOOPP}JMvdgMMUjeW+-m(`bu`qMmF0_b~Ys(K`{i5A1X@y zeu9sjiYM7f{Zzq>CmNb7lG~YX(jh3Qnb|2QlpKO!L0AeU<$76#gIP_{9C_$)#q_ zQ33lvBHN_~!BVrc9BG6$ZjGB4w24;LQCq7FBPpStOa!U9jd1L-J`S3tq-j9hy z>!(h*JbMO%Cp|_W#vaFOUB_ciA{q|X)+Y$cfrNX)sP7RZHjDbpet$M!emOT4)$!$g zawrUR5Pbp!gbC^iUojC+sv_2)X~0Kyc#un+wTvC~#h?*pe7G7`D*&3uf+1P2H>3cL z!K9r7cQ4DHoenf32jV0rQj~iP3U>tvcSWYyCGg`7em?T1zINFu3?3FF4Sp=WaQ_Wx z(-sS62jbIn;LmUb11+^518aLEXtXOGPa3$_7xosqHWN1H9TP&@?-Sr$$H2K>t8knH zf2Q^Xje6;8MT15$_269f6=LGhh0Ym*7*$n1PzC481Lw+Xy?g1XV<$jw1cIeC!9k7} z`N&S@@B`2$o`eP?O3`2>3F>hW=sdhu1nQXOHVEQM91I4IF92h$aI%06YM<{CGnG{-gg3L_r%aMQTTFSU&6I1m}hY z=~LJ<`Q`J?{d24%A2c`^d@e4ofx+RbfPTrTESs#AogT{x#gw#wkfI{?iFw|D&O$-` zlaAVt6#+iU9RqF}&Ac`%*Q2glLZLkIF!?x$ z5)ORC>Drt9}AIev$ATGR=6iT4h*3lKk zgK+gz{C^7Q@g44?UP198{q`1C!MV#NYhAx61@6y3mva=mEHH+%TCI!!J@)82&x6ji z7MB0h`G3B^=z&_v3)X=|Qw1T_qE_CrI z`v0D~>kSBzUJZTh|6U3l@xO8YpB8Ozjaw|Gj{Us5S$!7<{@uK;b*)In^6`HG1`W)( diff --git a/5-app-infra/4-service-catalog-repo/modules/artifact_registry/README.md b/5-app-infra/4-service-catalog-repo/modules/artifact_registry/README.md deleted file mode 100644 index 0031846c..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/artifact_registry/README.md +++ /dev/null @@ -1,74 +0,0 @@ - -Copyright 2024 Google LLC - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -## Requirements - -No requirements. - -## Providers - -| Name | Version | -|------|---------| -| [google](#provider\_google) | n/a | -| [google-beta](#provider\_google-beta) | n/a | - -## Modules - -No modules. - -## Resources - -| Name | Type | -|------|------| -| [google-beta_google_artifact_registry_repository.registry](https://registry.terraform.io/providers/hashicorp/google-beta/latest/docs/resources/google_artifact_registry_repository) | resource | -| [google_kms_crypto_key.key](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_crypto_key) | data source | -| [google_kms_key_ring.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_key_ring) | data source | -| [google_project.project](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/project) | data source | -| [google_projects.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/projects) | data source | - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| [cleanup\_policies](#input\_cleanup\_policies) | List of cleanup policies. |

list(object({
id = string
action = optional(string)
condition = optional(list(object({
tag_state = optional(string)
tag_prefixes = optional(list(string))
package_name_prefixes = optional(list(string))
older_than = optional(string)
})))
most_recent_versions = optional(list(object({
package_name_prefixes = optional(list(string))
keep_count = optional(number)
})))
}))
|
[
{
"action": "DELETE",
"condition": [
{
"older_than": "2592000s",
"tag_prefixes": [
"alpha",
"v0"
],
"tag_state": "TAGGED"
}
],
"id": "delete-prerelease"
}
]
| no | -| [cleanup\_policy\_dry\_run](#input\_cleanup\_policy\_dry\_run) | Whether to perform a dry run of the cleanup policy. | `bool` | `false` | no | -| [description](#input\_description) | Description of the repository. | `string` | `""` | no | -| [format](#input\_format) | Format of the repository. | `string` | `"DOCKER"` | no | -| [name](#input\_name) | Name of the repository. | `string` | n/a | yes | -| [project\_id](#input\_project\_id) | Optional Project ID. | `string` | `null` | no | -| [region](#input\_region) | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | - -## Outputs - -No outputs. - - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| cleanup\_policies | List of cleanup policies. |
list(object({
id = string
action = optional(string)
condition = optional(list(object({
tag_state = optional(string)
tag_prefixes = optional(list(string))
package_name_prefixes = optional(list(string))
older_than = optional(string)
})))
most_recent_versions = optional(list(object({
package_name_prefixes = optional(list(string))
keep_count = optional(number)
})))
}))
|
[
{
"action": "DELETE",
"condition": [
{
"older_than": "2592000s",
"tag_prefixes": [
"alpha",
"v0"
],
"tag_state": "TAGGED"
}
],
"id": "delete-prerelease"
}
]
| no | -| cleanup\_policy\_dry\_run | Whether to perform a dry run of the cleanup policy. | `bool` | `false` | no | -| description | Description of the repository. | `string` | `""` | no | -| format | Format of the repository. | `string` | `"DOCKER"` | no | -| name | Name of the repository. | `string` | n/a | yes | -| project\_id | Optional Project ID. | `string` | `null` | no | -| region | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | - -## Outputs - -No outputs. - - diff --git a/5-app-infra/4-service-catalog-repo/modules/artifact_registry/data.tf b/5-app-infra/4-service-catalog-repo/modules/artifact_registry/data.tf deleted file mode 100644 index 0ce9ad7b..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/artifact_registry/data.tf +++ /dev/null @@ -1,34 +0,0 @@ -/** - * 1Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -data "google_project" "project" { - project_id = var.project_id -} - -data "google_projects" "kms" { - filter = "labels.application_name:env-kms labels.environment:${data.google_project.project.labels.environment} lifecycleState:ACTIVE" -} - -data "google_kms_key_ring" "kms" { - name = local.keyring_name - location = var.region - project = data.google_projects.kms.projects.0.project_id -} - -data "google_kms_crypto_key" "key" { - name = data.google_project.project.name - key_ring = data.google_kms_key_ring.kms.id -} diff --git a/5-app-infra/4-service-catalog-repo/modules/artifact_registry/locals.tf b/5-app-infra/4-service-catalog-repo/modules/artifact_registry/locals.tf deleted file mode 100644 index c11543ac..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/artifact_registry/locals.tf +++ /dev/null @@ -1,23 +0,0 @@ -/** - * 2Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -locals { - keyring_name = "sample-keyring" - region_short_code = { - "us-central1" = "usc1" - "us-east4" = "use4" - } -} diff --git a/5-app-infra/4-service-catalog-repo/modules/artifact_registry/main.tf b/5-app-infra/4-service-catalog-repo/modules/artifact_registry/main.tf deleted file mode 100644 index b40cbe91..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/artifact_registry/main.tf +++ /dev/null @@ -1,64 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -resource "google_artifact_registry_repository" "registry" { - provider = google-beta - - project = data.google_project.project.project_id - location = var.region - repository_id = var.name - description = var.description - format = var.format - cleanup_policy_dry_run = var.cleanup_policy_dry_run - - #Customer Managed Encryption Keys - #Control ID: COM-CO-2.3 - #NIST 800-53: SC-12 SC-13 - #CRI Profile: PR.DS-1.1 PR.DS-1.2 PR.DS-2.1 PR.DS-2.2 PR.DS-5.1 - - kms_key_name = data.google_kms_crypto_key.key.id - - #Cleanup policy - #Control ID: AR-CO-6.1 - #NIST 800-53: SI-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - dynamic "cleanup_policies" { - for_each = var.cleanup_policies - content { - id = cleanup_policies.value.id - action = cleanup_policies.value.action - - dynamic "condition" { - for_each = cleanup_policies.value.condition != null ? 
[cleanup_policies.value.condition] : [] - content { - tag_state = condition.value[0].tag_state - tag_prefixes = condition.value[0].tag_prefixes - package_name_prefixes = condition.value[0].package_name_prefixes - older_than = condition.value[0].older_than - } - } - - dynamic "most_recent_versions" { - for_each = cleanup_policies.value.most_recent_versions != null ? [cleanup_policies.value.most_recent_versions] : [] - content { - package_name_prefixes = most_recent_versions.value[0].package_name_prefixes - keep_count = most_recent_versions.value[0].keep_count - } - } - } - } -} diff --git a/5-app-infra/4-service-catalog-repo/modules/artifact_registry/terraform.tfvars.example b/5-app-infra/4-service-catalog-repo/modules/artifact_registry/terraform.tfvars.example deleted file mode 100644 index 5b8b92cb..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/artifact_registry/terraform.tfvars.example +++ /dev/null @@ -1,43 +0,0 @@ -region = "us-central1" -environment = "development" -project_id = "prj-c-composer" -cmek_project_id = "prj-c-bu3cmek-wo8w" -name = "test-repo" -description = "Artifact Repository" -format = "DOCKER" -cleanup_policy_dry_run = false - -cleanup_policies = [ - { - id = "delete-prerelease" - action = "DELETE" - condition = [ - { - tag_state = "TAGGED" - tag_prefixes = ["alpha", "v0"] - older_than = "2592000s" - } - ] - }, - { - id = "keep-tagged-release" - action = "KEEP" - condition = [ - { - tag_state = "TAGGED" - tag_prefixes = ["release"] - package_name_prefixes = ["webapp", "mobile"] - } - ] - }, - { - id = "keep-minimum-versions" - action = "KEEP" - most_recent_versions = [ - { - package_name_prefixes = ["webapp", "mobile", "sandbox"] - keep_count = 5 - } - ] - } -] diff --git a/5-app-infra/4-service-catalog-repo/modules/artifact_registry/variables.tf b/5-app-infra/4-service-catalog-repo/modules/artifact_registry/variables.tf deleted file mode 100644 index 99cb1a45..00000000 --- 
a/5-app-infra/4-service-catalog-repo/modules/artifact_registry/variables.tf +++ /dev/null @@ -1,86 +0,0 @@ -/** - */ - -variable "name" { - description = "Name of the repository." - type = string -} - -variable "description" { - description = "Description of the repository." - type = string - default = "" -} - -variable "format" { - description = "Format of the repository." - type = string - default = "DOCKER" -} - -variable "region" { - type = string - description = "The resource region, one of [us-central1, us-east4]." - default = "us-central1" - validation { - condition = contains(["us-central1", "us-east4"], var.region) - error_message = "Region must be one of [us-central1, us-east4]." - } -} - -variable "cleanup_policy_dry_run" { - description = "Whether to perform a dry run of the cleanup policy." - type = bool - default = false -} - -variable "cleanup_policies" { - description = "List of cleanup policies." - type = list(object({ - id = string - action = optional(string) - condition = optional(list(object({ - tag_state = optional(string) - tag_prefixes = optional(list(string)) - package_name_prefixes = optional(list(string)) - older_than = optional(string) - }))) - most_recent_versions = optional(list(object({ - package_name_prefixes = optional(list(string)) - keep_count = optional(number) - }))) - })) - default = [ - { - id = "delete-prerelease" - action = "DELETE" - condition = [ - { - tag_state = "TAGGED" - tag_prefixes = ["alpha", "v0"] - older_than = "2592000s" - } - ] - } - ] -} - -variable "project_id" { - type = string - description = "Optional Project ID." 
- default = null -} diff --git a/5-app-infra/4-service-catalog-repo/modules/bigquery/README.md b/5-app-infra/4-service-catalog-repo/modules/bigquery/README.md deleted file mode 100644 index 8e898e26..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/bigquery/README.md +++ /dev/null @@ -1,76 +0,0 @@ - -Copyright 2024 Google LLC - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -## Requirements - -No requirements. - -## Providers - -| Name | Version | -|------|---------| -| [google](#provider\_google) | n/a | - -## Modules - -| Name | Source | Version | -|------|--------|---------| -| [bigquery](#module\_bigquery) | terraform-google-modules/bigquery/google | 7.0.0 | - -## Resources - -| Name | Type | -|------|------| -| [google_kms_crypto_key.key](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_crypto_key) | data source | -| [google_kms_key_ring.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_key_ring) | data source | -| [google_project.project](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/project) | data source | -| [google_projects.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/projects) | data source | - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| [dataset\_id](#input\_dataset\_id) | A unique ID for this dataset, without the project name. 
The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (\_). The maximum length is 1,024 characters. | `string` | n/a | yes | -| [default\_partition\_expiration\_ms](#input\_default\_partition\_expiration\_ms) | The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. | `number` | `null` | no | -| [default\_table\_expiration\_ms](#input\_default\_table\_expiration\_ms) | The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property. | `number` | `null` | no | -| [delete\_contents\_on\_destroy](#input\_delete\_contents\_on\_destroy) | If true, delete all the tables in the dataset when destroying the dataset; otherwise, destroying the dataset does not affect the tables in the dataset. If you try to delete a dataset that contains tables, and you set delete\_contents\_on\_destroy to false when you created the dataset, the request will fail. Always use this flag with caution. A missing value is treated as false. 
| `bool` | `false` | no | -| [description](#input\_description) | A user-friendly description of the dataset | `string` | `""` | no | -| [friendly\_name](#input\_friendly\_name) | A descriptive name for the dataset | `string` | `""` | no | -| [project\_id](#input\_project\_id) | Optional Project ID. | `string` | `null` | no | -| [region](#input\_region) | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | - -## Outputs - -No outputs. - - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| dataset\_id | A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (\_). The maximum length is 1,024 characters. | `string` | n/a | yes | -| default\_partition\_expiration\_ms | The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. | `number` | `null` | no | -| default\_table\_expiration\_ms | The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. 
If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property. | `number` | `null` | no | -| delete\_contents\_on\_destroy | If true, delete all the tables in the dataset when destroying the dataset; otherwise, destroying the dataset does not affect the tables in the dataset. If you try to delete a dataset that contains tables, and you set delete\_contents\_on\_destroy to false when you created the dataset, the request will fail. Always use this flag with caution. A missing value is treated as false. | `bool` | `false` | no | -| description | A user-friendly description of the dataset | `string` | `""` | no | -| friendly\_name | A descriptive name for the dataset | `string` | `""` | no | -| project\_id | Optional Project ID. | `string` | `null` | no | -| region | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | - -## Outputs - -No outputs. - - diff --git a/5-app-infra/4-service-catalog-repo/modules/bigquery/data.tf b/5-app-infra/4-service-catalog-repo/modules/bigquery/data.tf deleted file mode 100644 index 035a39b9..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/bigquery/data.tf +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -data "google_project" "project" { - project_id = var.project_id -} - -data "google_projects" "kms" { - filter = "labels.application_name:env-kms labels.environment:${data.google_project.project.labels.environment} lifecycleState:ACTIVE" -} - -data "google_kms_key_ring" "kms" { - name = local.keyring_name - location = var.region - project = data.google_projects.kms.projects.0.project_id -} - -data "google_kms_crypto_key" "key" { - name = data.google_project.project.name - key_ring = data.google_kms_key_ring.kms.id -} diff --git a/5-app-infra/4-service-catalog-repo/modules/bigquery/locals.tf b/5-app-infra/4-service-catalog-repo/modules/bigquery/locals.tf deleted file mode 100644 index 543a3276..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/bigquery/locals.tf +++ /dev/null @@ -1,21 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -locals { - project_hash = substr(sha256(data.google_project.project.project_id), 0, 6) - name_var = "bq-${var.dataset_id}-${data.google_project.project.labels.env_code}-${local.project_hash}" - keyring_name = "sample-keyring" -} diff --git a/5-app-infra/4-service-catalog-repo/modules/bigquery/main.tf b/5-app-infra/4-service-catalog-repo/modules/bigquery/main.tf deleted file mode 100644 index 4939412a..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/bigquery/main.tf +++ /dev/null @@ -1,24 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -module "bigquery" { - source = "terraform-google-modules/bigquery/google" - version = "7.0.0" - dataset_id = var.dataset_id - project_id = data.google_project.project.project_id - location = var.region - encryption_key = data.google_kms_crypto_key.key.id -} diff --git a/5-app-infra/4-service-catalog-repo/modules/bigquery/variables.tf b/5-app-infra/4-service-catalog-repo/modules/bigquery/variables.tf deleted file mode 100644 index ba671067..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/bigquery/variables.tf +++ /dev/null @@ -1,66 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -variable "project_id" { - type = string - description = "Optional Project ID." - default = null -} - -variable "dataset_id" { - description = "A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters." - type = string -} - -variable "region" { - type = string - description = "The resource region, one of [us-central1, us-east4]." - default = "us-central1" - validation { - condition = contains(["us-central1", "us-east4"], var.region) - error_message = "Region must be one of [us-central1, us-east4]." - } -} - -variable "friendly_name" { - description = "A descriptive name for the dataset" - type = string - default = "" -} - -variable "description" { - description = "A user-friendly description of the dataset" - type = string - default = "" -} - -variable "default_partition_expiration_ms" { - description = "The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value." - type = number - default = null -} - -variable "default_table_expiration_ms" { - description = "The default lifetime of all tables in the dataset, in milliseconds. 
The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property." - type = number - default = null -} - -variable "delete_contents_on_destroy" { - description = "If true, delete all the tables in the dataset when destroying the dataset; otherwise, destroying the dataset does not affect the tables in the dataset. If you try to delete a dataset that contains tables, and you set delete_contents_on_destroy to false when you created the dataset, the request will fail. Always use this flag with caution. A missing value is treated as false." 
- type = bool - default = false -} diff --git a/5-app-infra/4-service-catalog-repo/modules/bucket/README.md b/5-app-infra/4-service-catalog-repo/modules/bucket/README.md deleted file mode 100644 index e351f625..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/bucket/README.md +++ /dev/null @@ -1,149 +0,0 @@ -## IAM Permission Requirements - -To execute the provided Terraform configuration the following IAM permissions are required: - -- `cloudkms.cryptoKeys.get` -- `cloudkms.cryptoKeys.setIamPolicy` -- `iam.serviceAccounts.create` -- `iam.serviceAccounts.update` -- `storage.hmacKeys.create` -- `storage.hmacKeys.get` -- `storage.buckets.create` -- `storage.buckets.get` -- `storage.buckets.update` -- `storage.buckets.setIamPolicy` -- `storage.buckets.setLifecycle` -- `storage.objects.create` -- `storage.objects.delete` -- `resourcemanager.projects.get` - -## Notes: -- Additional permissions may be required based on specific use cases and actions within these resources. -- It's recommended to adhere to the principle of least privilege and grant only the permissions necessary for the tasks. -- Assign these permissions via predefined roles or create a custom IAM role encompassing all necessary permissions. -- Always review and adjust permissions according to organizational security policies. - - - -Copyright 2024 Google LLC - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -## Requirements - -No requirements. 
- -## Providers - -| Name | Version | -|------|---------| -| [google](#provider\_google) | n/a | -| [google-beta](#provider\_google-beta) | n/a | - -## Modules - -No modules. - -## Resources - -| Name | Type | -|------|------| -| [google-beta_google_storage_bucket.bucket](https://registry.terraform.io/providers/hashicorp/google-beta/latest/docs/resources/google_storage_bucket) | resource | -| [google_storage_bucket_object.root_folder](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/storage_bucket_object) | resource | -| [google_kms_crypto_key.key](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_crypto_key) | data source | -| [google_kms_key_ring.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_key_ring) | data source | -| [google_project.project](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/project) | data source | -| [google_projects.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/projects) | data source | -| [google_projects.log](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/projects) | data source | - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| [add\_random\_suffix](#input\_add\_random\_suffix) | whether to add a random suffix to the bucket name | `bool` | `false` | no | -| [dual\_region\_locations](#input\_dual\_region\_locations) | dual region description | `list(string)` | `[]` | no | -| [force\_destroy](#input\_force\_destroy) | (Optional, Default: true) When deleting a bucket, this boolean option will delete all contained objects. If you try to delete a bucket that contains objects, Terraform will fail that run. 
| `bool` | `true` | no | -| [gcs\_bucket\_prefix](#input\_gcs\_bucket\_prefix) | Name prefix to be used for GCS Bucket | `string` | `"bkt"` | no | -| [labels](#input\_labels) | Labels to be attached to the buckets | `map(string)` |
{
"classification": "dataclassification",
"label": "samplelabel",
"owner": "testowner"
}
| no | -| [lifecycle\_rules](#input\_lifecycle\_rules) | List of lifecycle rules to configure. Format is the same as described in provider documentation https://www.terraform.io/docs/providers/google/r/storage_bucket.html#lifecycle_rule except condition.matches\_storage\_class should be a comma delimited string. |
set(object({
# Object with keys:
# - type - The type of the action of this Lifecycle Rule. Supported values: Delete and SetStorageClass.
# - storage_class - (Required if action type is SetStorageClass) The target Storage Class of objects affected by this Lifecycle Rule.
action = map(string)

# Object with keys:
# - age - (Optional) Minimum age of an object in days to satisfy this condition.
# - created_before - (Optional) Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition.
# - with_state - (Optional) Match to live and/or archived objects. Supported values include: "LIVE", "ARCHIVED", "ANY".
# - matches_storage_class - (Optional) Comma delimited string for storage class of objects to satisfy this condition. Supported values include: MULTI_REGIONAL, REGIONAL.
# - num_newer_versions - (Optional) Relevant only for versioned objects. The number of newer versions of an object to satisfy this condition.
# - custom_time_before - (Optional) A date in the RFC 3339 format YYYY-MM-DD. This condition is satisfied when the customTime metadata for the object is set to an earlier date than the date used in this lifecycle condition.
# - days_since_custom_time - (Optional) The number of days from the Custom-Time metadata attribute after which this condition becomes true.
# - days_since_noncurrent_time - (Optional) Relevant only for versioned objects. Number of days elapsed since the noncurrent timestamp of an object.
# - noncurrent_time_before - (Optional) Relevant only for versioned objects. The date in RFC 3339 (e.g. 2017-06-13) when the object became nonconcurrent.
condition = map(string)
}))
|
[
{
"action": {
"storage_class": "NEARLINE",
"type": "SetStorageClass"
},
"condition": {
"age": "30",
"matches_storage_class": "REGIONAL"
}
},
{
"action": {
"type": "Delete"
},
"condition": {
"with_state": "ARCHIVED"
}
}
]
| no | -| [name](#input\_name) | name of bucket | `string` | n/a | yes | -| [object\_folder\_temporary\_hold](#input\_object\_folder\_temporary\_hold) | Set root folder temporary hold according to security control GCS-CO-6.16, toggle off to allow for object deletion. | `bool` | `false` | no | -| [project\_id](#input\_project\_id) | Optional Project ID. | `string` | `null` | no | -| [region](#input\_region) | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | -| [requester\_pays](#input\_requester\_pays) | Enables Requester Pays on a storage bucket. | `bool` | `false` | no | -| [retention\_policy](#input\_retention\_policy) | Map of retention policy values. Format is the same as described in provider documentation https://www.terraform.io/docs/providers/google/r/storage_bucket#retention_policy | `any` | `{}` | no | -| [storage\_class](#input\_storage\_class) | Storage class to create the bucket | `string` | `"STANDARD"` | no | -| [uniform\_bucket\_level\_access](#input\_uniform\_bucket\_level\_access) | Whether to have uniform access levels or not | `bool` | `true` | no | -| [versioning\_enabled](#input\_versioning\_enabled) | Whether to enable versioning or not | `bool` | `true` | no | - -## Outputs - -| Name | Description | -|------|-------------| -| [storage\_bucket](#output\_storage\_bucket) | Storage Bucket. | - - -## Security Controls - -The following table outlines which of the suggested controls for Vertex Generative AI are enabled in this module. -| Name | Control ID | NIST 800-53 | CRI Profile | Category | Source Blueprint -|------|------------|-------------|-------------|----------| ----------------| -|Customer Managed Encryption Keys| COM-CO-2.3| SC-12
SC-13| PR.DS-1.1
PR.DS-2.1
PR.DS-2.2
PR.DS-5.1 | Recommended | Secure Foundation v4 -|Regional Storage Class Lifecycle Rule | GCS-CO-6.11 | SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Regional Storage Class Lifecycle Rule | GCS-CO-6.12 | SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Ensure Lifecycle management is enabled 1 of 2 | GCS-CO-6.13 | SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Ensure Lifecycle management is enabled 2 of 2 | GCS-CO-6.14 | SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Ensure Retention policy is using the bucket lock| GCS-CO-6.15 | SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Object contains a temporary hold and should be evaluated| GCS-CO-6.16 | SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Retention Policy| GCS-CO-6.17 | SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Classification Tag| GCS-CO-6.18 | SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Versioning is Enabled| GCS-CO-6.2| SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Log Bucket Exists| GCS-CO-6.3| AU-2
AU-3
AU-8
AU-9| DM.ED-7.1
DM.ED-7.2
DM.ED-7.3
DM.ED-7.4
PR.IP-1.4 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Labeling Tag| GCS-CO-6.4| SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Deletion Rules| GCS-CO-6.5| SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Deletion Rules For Deleted Objects| GCS-CO-6.6| SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Ensure that versioning is enabled on all Cloud Storage instances| GCS-CO-6.7| SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Owner Tag| GCS-CO-6.8| SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Ensure HMAC keys for service accounts are handled correctly| GCS-CO-6.9| SI-12
SC-13 | PR.IP-1.1
PR.IP-1.2
PR.IP-2.1
PR.DS-2.2
PR.DS-5.1 | Required | ML Foundation v0.1.0-alpha.1 -|Owner Tag| GCS-CO-7.1| SI-12 | PR.IP-2.1
PR.IP-2.2
PR.IP-2.3 | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 - - - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| add\_random\_suffix | whether to add a random suffix to the bucket name | `bool` | `false` | no | -| dual\_region\_locations | dual region description | `list(string)` | `[]` | no | -| force\_destroy | (Optional, Default: true) When deleting a bucket, this boolean option will delete all contained objects. If you try to delete a bucket that contains objects, Terraform will fail that run. | `bool` | `true` | no | -| gcs\_bucket\_prefix | Name prefix to be used for GCS Bucket | `string` | `"bkt"` | no | -| labels | Labels to be attached to the buckets | `map(string)` |
{
"classification": "dataclassification",
"label": "samplelabel",
"owner": "testowner"
}
| no | -| lifecycle\_rules | List of lifecycle rules to configure. Format is the same as described in provider documentation https://www.terraform.io/docs/providers/google/r/storage_bucket.html#lifecycle_rule except condition.matches\_storage\_class should be a comma delimited string. |
set(object({
# Object with keys:
# - type - The type of the action of this Lifecycle Rule. Supported values: Delete and SetStorageClass.
# - storage_class - (Required if action type is SetStorageClass) The target Storage Class of objects affected by this Lifecycle Rule.
action = map(string)

# Object with keys:
# - age - (Optional) Minimum age of an object in days to satisfy this condition.
# - created_before - (Optional) Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition.
# - with_state - (Optional) Match to live and/or archived objects. Supported values include: "LIVE", "ARCHIVED", "ANY".
# - matches_storage_class - (Optional) Comma delimited string for storage class of objects to satisfy this condition. Supported values include: MULTI_REGIONAL, REGIONAL.
# - num_newer_versions - (Optional) Relevant only for versioned objects. The number of newer versions of an object to satisfy this condition.
# - custom_time_before - (Optional) A date in the RFC 3339 format YYYY-MM-DD. This condition is satisfied when the customTime metadata for the object is set to an earlier date than the date used in this lifecycle condition.
# - days_since_custom_time - (Optional) The number of days from the Custom-Time metadata attribute after which this condition becomes true.
# - days_since_noncurrent_time - (Optional) Relevant only for versioned objects. Number of days elapsed since the noncurrent timestamp of an object.
# - noncurrent_time_before - (Optional) Relevant only for versioned objects. The date in RFC 3339 (e.g. 2017-06-13) when the object became nonconcurrent.
condition = map(string)
}))
|
[
{
"action": {
"storage_class": "NEARLINE",
"type": "SetStorageClass"
},
"condition": {
"age": "30",
"matches_storage_class": "REGIONAL"
}
},
{
"action": {
"type": "Delete"
},
"condition": {
"with_state": "ARCHIVED"
}
}
]
| no | -| name | name of bucket | `string` | n/a | yes | -| object\_folder\_temporary\_hold | Set root folder temporary hold according to security control GCS-CO-6.16, toggle off to allow for object deletion. | `bool` | `false` | no | -| project\_id | Optional Project ID. | `string` | `null` | no | -| region | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | -| requester\_pays | Enables Requester Pays on a storage bucket. | `bool` | `false` | no | -| retention\_policy | Map of retention policy values. Format is the same as described in provider documentation https://www.terraform.io/docs/providers/google/r/storage_bucket#retention_policy | `any` | `{}` | no | -| storage\_class | Storage class to create the bucket | `string` | `"STANDARD"` | no | -| uniform\_bucket\_level\_access | Whether to have uniform access levels or not | `bool` | `true` | no | -| versioning\_enabled | Whether to enable versioning or not | `bool` | `true` | no | - -## Outputs - -| Name | Description | -|------|-------------| -| storage\_bucket | Storage Bucket. | - - - diff --git a/5-app-infra/4-service-catalog-repo/modules/bucket/data.tf b/5-app-infra/4-service-catalog-repo/modules/bucket/data.tf deleted file mode 100644 index 61a44b27..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/bucket/data.tf +++ /dev/null @@ -1,38 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -data "google_project" "project" { - project_id = var.project_id -} - -data "google_projects" "kms" { - filter = "labels.application_name:env-kms labels.environment:${data.google_project.project.labels.environment} lifecycleState:ACTIVE" -} - -data "google_kms_key_ring" "kms" { - name = local.keyring_name - location = var.region - project = data.google_projects.kms.projects.0.project_id -} - -data "google_kms_crypto_key" "key" { - name = data.google_project.project.name - key_ring = data.google_kms_key_ring.kms.id -} - -data "google_projects" "log" { - filter = "labels.application_name:env-logging labels.environment:${data.google_project.project.labels.environment} lifecycleState:ACTIVE" -} diff --git a/5-app-infra/4-service-catalog-repo/modules/bucket/locals.tf b/5-app-infra/4-service-catalog-repo/modules/bucket/locals.tf deleted file mode 100644 index c640c38e..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/bucket/locals.tf +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -locals { - log_bucket_prefix = "bkt" - keyring_name = "sample-keyring" -} diff --git a/5-app-infra/4-service-catalog-repo/modules/bucket/main.tf b/5-app-infra/4-service-catalog-repo/modules/bucket/main.tf deleted file mode 100644 index 1ab54cbf..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/bucket/main.tf +++ /dev/null @@ -1,148 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -resource "google_storage_bucket" "bucket" { - provider = google-beta - name = join("-", [var.gcs_bucket_prefix, data.google_projects.log.projects.0.labels.env_code, var.name]) - project = data.google_project.project.project_id - location = upper(var.region) - - dynamic "custom_placement_config" { - for_each = length(var.dual_region_locations) != 0 ? 
[1] : [] - content { - data_locations = var.dual_region_locations - } - } - - force_destroy = var.force_destroy - uniform_bucket_level_access = var.uniform_bucket_level_access - storage_class = var.storage_class - public_access_prevention = "enforced" - - #Versioning is Enabled - #Control ID: GCS-CO-6.2 and GCS-CO-6.7 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - versioning { - enabled = var.versioning_enabled - } - - #Labeling Tag - #Control ID: GCS-CO-6.4 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - labels = var.labels - - #Retention Policy - #Control ID: GCS-CO-6.17 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - dynamic "retention_policy" { - for_each = var.retention_policy != {} ? [var.retention_policy] : [] - content { - - #Ensure Retention policy is using the bucket lock - #Control ID: GCS-CO-6.13 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - is_locked = lookup(retention_policy.value, "is_locked", null) - retention_period = lookup(retention_policy.value, "retention_period", null) - } - } - - #Ensure Lifecycle management is enabled 1 of 2 - #Control ID: GCS-CO-6.13 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - #Ensure Lifecycle management is enabled 2 of 2 - #Control ID: GCS-CO-6.14 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - dynamic "lifecycle_rule" { - for_each = var.lifecycle_rules - content { - action { - type = lifecycle_rule.value.action.type - - #Regional Storage Class Lifecycle Rule - #Control ID: GCS-CO-6.11 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - storage_class = lookup(lifecycle_rule.value.action, "storage_class", null) - } - condition { - age = lookup(lifecycle_rule.value.condition, "age", null) - created_before = lookup(lifecycle_rule.value.condition, "created_before", null) - with_state = lookup(lifecycle_rule.value.condition, "with_state", 
lookup(lifecycle_rule.value.condition, "is_live", false) ? "LIVE" : null) - - #Regional Storage Class Lifecycle Rule - #Control ID: GCS-CO-6.12 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - matches_storage_class = contains(keys(lifecycle_rule.value.condition), "matches_storage_class") ? split(",", lifecycle_rule.value.condition["matches_storage_class"]) : null - num_newer_versions = lookup(lifecycle_rule.value.condition, "num_newer_versions", null) - custom_time_before = lookup(lifecycle_rule.value.condition, "custom_time_before", null) - days_since_custom_time = lookup(lifecycle_rule.value.condition, "days_since_custom_time", null) - days_since_noncurrent_time = lookup(lifecycle_rule.value.condition, "days_since_noncurrent_time", null) - noncurrent_time_before = lookup(lifecycle_rule.value.condition, "noncurrent_time_before", null) - } - } - } - - #Customer Managed Encryption Keys - #Control ID: COM-CO-2.3 - #NIST 800-53: SC-12 SC-13 - #CRI Profile: PR.DS-1.1 PR.DS-1.2 PR.DS-2.1 PR.DS-2.2 PR.DS-5.1 - - encryption { - default_kms_key_name = data.google_kms_crypto_key.key.id - } - - #Log Bucket Exists - #Control ID: GCS-CO-6.3 and GCS-CO-7.1 - #NIST 800-53: AU-2 AU-3 AU-8 AU-9 - #CRI Profile: DM.ED-7.1 DM.ED-7.2 DM.ED-7.3 DM.ED-7.4 PR.IP-1.4 - - logging { - log_bucket = join("-", [local.log_bucket_prefix, data.google_projects.log.projects.0.project_id]) - } -} - -resource "google_storage_bucket_object" "root_folder" { - name = "root/" - content = " " - bucket = google_storage_bucket.bucket.name - - #Object contains a temporary hold and should be evaluated - #Control ID: GCS-CO-6.16 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - temporary_hold = var.object_folder_temporary_hold - - #Customer Managed Encryption Keys - #Control ID: COM-CO-2.3 - #NIST 800-53: SC-12 SC-13 - #CRI Profile: PR.DS-1.1 PR.DS-1.2 PR.DS-2.1 PR.DS-2.2 PR.DS-5.1 -} diff --git a/5-app-infra/4-service-catalog-repo/modules/bucket/outputs.tf 
b/5-app-infra/4-service-catalog-repo/modules/bucket/outputs.tf deleted file mode 100644 index 248be99c..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/bucket/outputs.tf +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -output "storage_bucket" { - description = "Storage Bucket." - value = google_storage_bucket.bucket -} diff --git a/5-app-infra/4-service-catalog-repo/modules/bucket/variables.tf b/5-app-infra/4-service-catalog-repo/modules/bucket/variables.tf deleted file mode 100644 index 9abddef7..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/bucket/variables.tf +++ /dev/null @@ -1,186 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -variable "name" { - type = string - description = "name of bucket" -} - -variable "region" { - type = string - description = "The resource region, one of [us-central1, us-east4]." - default = "us-central1" - validation { - condition = contains(["us-central1", "us-east4"], var.region) - error_message = "Region must be one of [us-central1, us-east4]." - } -} - -variable "dual_region_locations" { - type = list(string) - default = [] - description = "dual region description" - validation { - condition = length(var.dual_region_locations) == 0 || length(var.dual_region_locations) == 2 - error_message = "Exactly 0 or 2 regions expected." - } -} - -variable "force_destroy" { - type = bool - description = "(Optional, Default: true) When deleting a bucket, this boolean option will delete all contained objects. If you try to delete a bucket that contains objects, Terraform will fail that run." - default = true -} - -variable "versioning_enabled" { - type = bool - description = "Whether to enable versioning or not" - default = true -} - -variable "lifecycle_rules" { - type = set(object({ - # Object with keys: - # - type - The type of the action of this Lifecycle Rule. Supported values: Delete and SetStorageClass. - # - storage_class - (Required if action type is SetStorageClass) The target Storage Class of objects affected by this Lifecycle Rule. - action = map(string) - - # Object with keys: - # - age - (Optional) Minimum age of an object in days to satisfy this condition. - # - created_before - (Optional) Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition. - # - with_state - (Optional) Match to live and/or archived objects. Supported values include: "LIVE", "ARCHIVED", "ANY". - # - matches_storage_class - (Optional) Comma delimited string for storage class of objects to satisfy this condition. Supported values include: MULTI_REGIONAL, REGIONAL. - # - num_newer_versions - (Optional) Relevant only for versioned objects. 
The number of newer versions of an object to satisfy this condition. - # - custom_time_before - (Optional) A date in the RFC 3339 format YYYY-MM-DD. This condition is satisfied when the customTime metadata for the object is set to an earlier date than the date used in this lifecycle condition. - # - days_since_custom_time - (Optional) The number of days from the Custom-Time metadata attribute after which this condition becomes true. - # - days_since_noncurrent_time - (Optional) Relevant only for versioned objects. Number of days elapsed since the noncurrent timestamp of an object. - # - noncurrent_time_before - (Optional) Relevant only for versioned objects. The date in RFC 3339 (e.g. 2017-06-13) when the object became nonconcurrent. - condition = map(string) - })) - description = "List of lifecycle rules to configure. Format is the same as described in provider documentation https://www.terraform.io/docs/providers/google/r/storage_bucket.html#lifecycle_rule except condition.matches_storage_class should be a comma delimited string." - default = [ - { - #Deletion Rules - #Control ID: GCS-CO-6.5 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - action = { - type = "SetStorageClass" - storage_class = "NEARLINE" - } - condition = { - age = "30" - matches_storage_class = "REGIONAL" - } - }, - { - #Deletion Rules - #Control ID: GCS-CO-6.6 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - action = { - type = "Delete" - } - condition = { - with_state = "ARCHIVED" - } - } - ] -} - -variable "retention_policy" { - type = any - default = {} - description = "Map of retention policy values. Format is the same as described in provider documentation https://www.terraform.io/docs/providers/google/r/storage_bucket#retention_policy" -} - -variable "object_folder_temporary_hold" { - type = bool - default = false - description = "Set root folder temporary hold according to security control GCS-CO-6.16, toggle off to allow for object deletion." 
-} - -#Labeling Tag -#Control ID: GCS-CO-6.4 -#NIST 800-53: SC-12 -#CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - -variable "labels" { - description = "Labels to be attached to the buckets" - type = map(string) - default = { - #Labelling tag - #Control ID: GCS-CO-6.4 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - label = "samplelabel" - - #Owner Tag - #Control ID: GCS-CO-6.8 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - owner = "testowner" - - #Classification Tag - #Control ID: GCS-CO-6.18 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - classification = "dataclassification" - } -} - -variable "add_random_suffix" { - description = "whether to add a random suffix to the bucket name" - type = bool - default = false -} - -variable "uniform_bucket_level_access" { - description = "Whether to have uniform access levels or not" - type = bool - default = true -} - -variable "storage_class" { - type = string - description = "Storage class to create the bucket" - default = "STANDARD" - validation { - condition = contains(["STANDARD", "MULTI_REGIONAL", "REGIONAL", "NEARLINE", "COLDLINE", "ARCHIVE"], var.storage_class) - error_message = "Storage class can be one of STANDARD, MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, ARCHIVE." - } -} - -variable "requester_pays" { - description = "Enables Requester Pays on a storage bucket." - type = bool - default = false -} - -variable "gcs_bucket_prefix" { - description = "Name prefix to be used for GCS Bucket" - type = string - default = "bkt" -} - -variable "project_id" { - type = string - description = "Optional Project ID." 
- default = null -} diff --git a/5-app-infra/4-service-catalog-repo/modules/composer/README.md b/5-app-infra/4-service-catalog-repo/modules/composer/README.md deleted file mode 100644 index 87e59ee6..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/composer/README.md +++ /dev/null @@ -1,129 +0,0 @@ - -Copyright 2024 Google LLC - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -## Requirements - -No requirements. - -## Providers - -| Name | Version | -|------|---------| -| [google](#provider\_google) | n/a | -| [google-beta](#provider\_google-beta) | n/a | -| [random](#provider\_random) | n/a | - -## Modules - -| Name | Source | Version | -|------|--------|---------| -| [vpc](#module\_vpc) | terraform-google-modules/network/google | ~> 8.1 | - -## Resources - -| Name | Type | -|------|------| -| [google-beta_google_cloudbuildv2_connection.repo_connect](https://registry.terraform.io/providers/hashicorp/google-beta/latest/docs/resources/google_cloudbuildv2_connection) | resource | -| [google-beta_google_cloudbuildv2_repository.repo](https://registry.terraform.io/providers/hashicorp/google-beta/latest/docs/resources/google_cloudbuildv2_repository) | resource | -| [google-beta_google_composer_environment.cluster](https://registry.terraform.io/providers/hashicorp/google-beta/latest/docs/resources/google_composer_environment) | resource | -| [google_cloudbuild_trigger.zip_files](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/cloudbuild_trigger) | resource 
| -| [google_secret_manager_secret_iam_policy.policy](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/secret_manager_secret_iam_policy) | resource | -| [google_service_account.trigger_sa](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/service_account) | resource | -| [google_service_account_iam_member.trigger_sa_impersonate](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/service_account_iam_member) | resource | -| [random_shuffle.zones](https://registry.terraform.io/providers/hashicorp/random/latest/docs/resources/shuffle) | resource | -| [google_iam_policy.serviceagent_secretAccessor](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/iam_policy) | data source | -| [google_kms_crypto_key.key](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_crypto_key) | data source | -| [google_kms_key_ring.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_key_ring) | data source | -| [google_netblock_ip_ranges.health_checkers](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/netblock_ip_ranges) | data source | -| [google_netblock_ip_ranges.iap_forwarders](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/netblock_ip_ranges) | data source | -| [google_netblock_ip_ranges.legacy_health_checkers](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/netblock_ip_ranges) | data source | -| [google_project.project](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/project) | data source | -| [google_projects.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/projects) | data source | -| [google_pubsub_topic.secret_rotations](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/pubsub_topic) | data 
source | -| [google_secret_manager_secret.github_api_secret](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/secret_manager_secret) | data source | -| [google_secret_manager_secret_version.github_api](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/secret_manager_secret_version) | data source | -| [google_service_account.composer](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/service_account) | data source | - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| [airflow\_config\_overrides](#input\_airflow\_config\_overrides) | Airflow configuration properties to override. Property keys contain the section and property names, separated by a hyphen, for example "core-dags\_are\_paused\_at\_creation". | `map(string)` | `{}` | no | -| [env\_variables](#input\_env\_variables) | Additional environment variables to provide to the Apache Airflow scheduler, worker, and webserver processes. Environment variable names must match the regular expression [a-zA-Z\_][a-zA-Z0-9\_]*. 
They cannot specify Apache Airflow software configuration overrides (they cannot match the regular expression AIRFLOW\_\_[A-Z0-9\_]+\_\_[A-Z0-9\_]+), and they cannot match any of the following reserved names: [AIRFLOW\_HOME,C\_FORCE\_ROOT,CONTAINER\_NAME,DAGS\_FOLDER,GCP\_PROJECT,GCS\_BUCKET,GKE\_CLUSTER\_NAME,SQL\_DATABASE,SQL\_INSTANCE,SQL\_PASSWORD,SQL\_PROJECT,SQL\_REGION,SQL\_USER] | `map(any)` | `{}` | no | -| [github\_app\_installation\_id](#input\_github\_app\_installation\_id) | The app installation ID that was created when installing Google Cloud Build in Github: https://github.com/apps/google-cloud-build | `number` | n/a | yes | -| [github\_name\_prefix](#input\_github\_name\_prefix) | A name for your github connection to cloubuild | `string` | `"github-modules"` | no | -| [github\_remote\_uri](#input\_github\_remote\_uri) | Url of your github repo | `string` | n/a | yes | -| [github\_secret\_name](#input\_github\_secret\_name) | Name of the github secret to extract github token info | `string` | `"github-api-token"` | no | -| [image\_version](#input\_image\_version) | The version of the aiflow running in the cloud composer environment. | `string` | `"composer-2.5.2-airflow-2.6.3"` | no | -| [labels](#input\_labels) | The resource labels (a map of key/value pairs) to be applied to the Cloud Composer. | `map(string)` | `{}` | no | -| [maintenance\_window](#input\_maintenance\_window) | The configuration settings for Cloud Composer maintenance window. |
object({
start_time = string
end_time = string
recurrence = string
})
|
{
"end_time": "2021-01-01T13:00:00Z",
"recurrence": "FREQ=WEEKLY;BYDAY=SU",
"start_time": "2021-01-01T01:00:00Z"
}
| no | -| [name](#input\_name) | name of the Composer environment | `string` | n/a | yes | -| [project\_id](#input\_project\_id) | Optional project ID where Cloud Composer Environment is created. | `string` | `null` | no | -| [pypi\_packages](#input\_pypi\_packages) | Custom Python Package Index (PyPI) packages to be installed in the environment. Keys refer to the lowercase package name (e.g. "numpy"). | `map(string)` | `{}` | no | -| [python\_version](#input\_python\_version) | The default version of Python used to run the Airflow scheduler, worker, and webserver processes. | `string` | `"3"` | no | -| [region](#input\_region) | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | -| [service\_account\_prefix](#input\_service\_account\_prefix) | Name prefix to use for service accounts. | `string` | `"sa"` | no | -| [web\_server\_allowed\_ip\_ranges](#input\_web\_server\_allowed\_ip\_ranges) | The network-level access control policy for the Airflow web server. If unspecified, no network-level access restrictions will be applied. |
list(object({
value = string
description = string
}))
| `null` | no | - -## Outputs - -| Name | Description | -|------|-------------| -| [airflow\_uri](#output\_airflow\_uri) | URI of the Apache Airflow Web UI hosted within the Cloud Composer Environment. | -| [composer\_env\_id](#output\_composer\_env\_id) | ID of Cloud Composer Environment. | -| [composer\_env\_name](#output\_composer\_env\_name) | Name of the Cloud Composer Environment. | -| [gcs\_bucket](#output\_gcs\_bucket) | Google Cloud Storage bucket which hosts DAGs for the Cloud Composer Environment. | -| [gke\_cluster](#output\_gke\_cluster) | Google Kubernetes Engine cluster used to run the Cloud Composer Environment. | - - -## Security Controls - -The following table outlines which of the suggested controls for Vertex Generative AI are enabled in this module. -| Name | Control ID | NIST 800-53 | CRI Profile | Category | Source Blueprint -|------|------------|-------------|-------------|----------| ----------------| -|Customer Managed Encryption Keys| COM-CO-2.3| SC-12
SC-13| PR.DS-1.1
PR.DS-2.1
PR.DS-2.2
PR.DS-5.1 | Recommended | Secure Foundation v4 - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| airflow\_config\_overrides | Airflow configuration properties to override. Property keys contain the section and property names, separated by a hyphen, for example "core-dags\_are\_paused\_at\_creation". | `map(string)` | `{}` | no | -| env\_variables | Additional environment variables to provide to the Apache Airflow scheduler, worker, and webserver processes. Environment variable names must match the regular expression [a-zA-Z\_][a-zA-Z0-9\_]*. They cannot specify Apache Airflow software configuration overrides (they cannot match the regular expression AIRFLOW\_\_[A-Z0-9\_]+\_\_[A-Z0-9\_]+), and they cannot match any of the following reserved names: [AIRFLOW\_HOME,C\_FORCE\_ROOT,CONTAINER\_NAME,DAGS\_FOLDER,GCP\_PROJECT,GCS\_BUCKET,GKE\_CLUSTER\_NAME,SQL\_DATABASE,SQL\_INSTANCE,SQL\_PASSWORD,SQL\_PROJECT,SQL\_REGION,SQL\_USER] | `map(any)` | `{}` | no | -| github\_app\_installation\_id | The app installation ID that was created when installing Google Cloud Build in Github: https://github.com/apps/google-cloud-build | `number` | n/a | yes | -| github\_name\_prefix | A name for your github connection to cloubuild | `string` | `"github-modules"` | no | -| github\_remote\_uri | Url of your github repo | `string` | n/a | yes | -| github\_secret\_name | Name of the github secret to extract github token info | `string` | `"github-api-token"` | no | -| image\_version | The version of the aiflow running in the cloud composer environment. | `string` | `"composer-2.5.2-airflow-2.6.3"` | no | -| labels | The resource labels (a map of key/value pairs) to be applied to the Cloud Composer. | `map(string)` | `{}` | no | -| maintenance\_window | The configuration settings for Cloud Composer maintenance window. |
object({
start_time = string
end_time = string
recurrence = string
})
|
{
"end_time": "2021-01-01T13:00:00Z",
"recurrence": "FREQ=WEEKLY;BYDAY=SU",
"start_time": "2021-01-01T01:00:00Z"
}
| no | -| name | name of the Composer environment | `string` | n/a | yes | -| project\_id | Optional project ID where Cloud Composer Environment is created. | `string` | `null` | no | -| pypi\_packages | Custom Python Package Index (PyPI) packages to be installed in the environment. Keys refer to the lowercase package name (e.g. "numpy"). | `map(string)` | `{}` | no | -| python\_version | The default version of Python used to run the Airflow scheduler, worker, and webserver processes. | `string` | `"3"` | no | -| region | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | -| service\_account\_prefix | Name prefix to use for service accounts. | `string` | `"sa"` | no | -| web\_server\_allowed\_ip\_ranges | The network-level access control policy for the Airflow web server. If unspecified, no network-level access restrictions will be applied. |
list(object({
value = string
description = string
}))
| `null` | no | - -## Outputs - -| Name | Description | -|------|-------------| -| airflow\_uri | URI of the Apache Airflow Web UI hosted within the Cloud Composer Environment. | -| composer\_env\_id | ID of Cloud Composer Environment. | -| composer\_env\_name | Name of the Cloud Composer Environment. | -| gcs\_bucket | Google Cloud Storage bucket which hosts DAGs for the Cloud Composer Environment. | -| gke\_cluster | Google Kubernetes Engine cluster used to run the Cloud Composer Environment. | - - diff --git a/5-app-infra/4-service-catalog-repo/modules/composer/data.tf b/5-app-infra/4-service-catalog-repo/modules/composer/data.tf deleted file mode 100644 index 5d66e4b3..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/composer/data.tf +++ /dev/null @@ -1,66 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -data "google_netblock_ip_ranges" "legacy_health_checkers" { - range_type = "legacy-health-checkers" -} - -data "google_netblock_ip_ranges" "health_checkers" { - range_type = "health-checkers" -} - -// Cloud IAP's TCP forwarding netblock -data "google_netblock_ip_ranges" "iap_forwarders" { - range_type = "iap-forwarders" -} - -data "google_project" "project" { - project_id = var.project_id -} - -data "google_projects" "kms" { - filter = "labels.application_name:env-kms labels.environment:${data.google_project.project.labels.environment} lifecycleState:ACTIVE" -} - -data "google_kms_key_ring" "kms" { - name = local.keyring_name - location = var.region - project = data.google_projects.kms.projects.0.project_id -} - -data "google_kms_crypto_key" "key" { - name = data.google_project.project.name - key_ring = data.google_kms_key_ring.kms.id -} - -data "google_service_account" "composer" { - account_id = format("%s-%s-%s", var.service_account_prefix, data.google_project.project.labels.env_code, "composer") - project = data.google_project.project.project_id -} - -data "google_pubsub_topic" "secret_rotations" { - name = "secret-rotation-notifications" - project = data.google_project.project.project_id -} - -data "google_secret_manager_secret" "github_api_secret" { - secret_id = var.github_secret_name - project = data.google_project.project.project_id -} - -data "google_secret_manager_secret_version" "github_api" { - secret = data.google_secret_manager_secret.github_api_secret.id -} diff --git a/5-app-infra/4-service-catalog-repo/modules/composer/locals.tf b/5-app-infra/4-service-catalog-repo/modules/composer/locals.tf deleted file mode 100644 index 795efcc1..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/composer/locals.tf +++ /dev/null @@ -1,74 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -locals { - composer_node_use4 = "172.16.8.0/22" - composer_node_usc1 = "172.17.8.0/22" - - # secondary - pods_use4 = "172.18.0.0/16" - services_use4 = "172.16.12.0/22" - - pods_usc1 = "172.19.0.0/16" - services_usc1 = "172.17.12.0/22" - - # composer specific - composer_master_use4 = "192.168.0.0/28" - composer_master_usc1 = "192.168.1.0/28" - - composer_webserver_use4 = "192.168.2.0/29" - composer_webserver_usc1 = "192.168.3.0/29" - - private_service_connect_ip = "10.116.46.2" - - keyring_name = "sample-keyring" - - sa_name = format("%s-%s", data.google_project.project.labels.env_code, var.name) - - labels = merge( - var.labels, - { - "environment" = data.google_project.project.labels.environment - "env_code" = data.google_project.project.labels.env_code - } - ) - region_short_code = { - "us-central1" = "usc1" - "us-east4" = "use4" - } - zones = { - "us-central1" = ["a", "b", "c"] - "us-east4" = ["a", "b", "c"] - } - network_name = var.region == "us-central1" ? "composer-vpc-usc1" : "composer-vpc-use4" - subnetwork = var.region == "us-central1" ? "composer-primary-usc1" : "composer-primary-use4" - services_secondary_range_name = var.region == "us-central1" ? "composer-services-primary-usc1" : "composer-services-primary-use4" - cluster_secondary_range_name = var.region == "us-central1" ? 
"pods-primary-usc1" : "pods-primary-use4" - - service_agents = [ - "artifactregistry.googleapis.com", - "composer.googleapis.com", - "compute.googleapis.com", - "container.googleapis.com", - "pubsub.googleapis.com", - "storage.googleapis.com", - "secretmanager.googleapis.com" - ] - - tags = var.region == "us-central1" ? ["composer-usc1"] : ["composer-use4"] - - github_repository = replace(var.github_remote_uri, "https://", "") -} diff --git a/5-app-infra/4-service-catalog-repo/modules/composer/main.tf b/5-app-infra/4-service-catalog-repo/modules/composer/main.tf deleted file mode 100644 index 193102f9..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/composer/main.tf +++ /dev/null @@ -1,93 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -resource "random_shuffle" "zones" { - input = local.zones[var.region] - result_count = 1 -} - -resource "google_composer_environment" "cluster" { - provider = google-beta - - project = data.google_project.project.project_id - name = var.name - region = var.region - labels = local.labels - - config { - node_config { - network = "projects/${data.google_project.project.project_id}/global/networks/${local.network_name}" - subnetwork = "projects/${data.google_project.project.project_id}/regions/${var.region}/subnetworks/${local.subnetwork}" - service_account = data.google_service_account.composer.email - tags = local.tags - - ip_allocation_policy { - cluster_secondary_range_name = local.cluster_secondary_range_name - services_secondary_range_name = local.services_secondary_range_name - } - } - - private_environment_config { - enable_private_endpoint = true - master_ipv4_cidr_block = var.region == "us-central1" ? "192.168.1.0/28" : "192.168.0.0/28" - cloud_sql_ipv4_cidr_block = var.region == "us-central1" ? "192.168.5.0/24" : "192.168.4.0/24" - } - - maintenance_window { - start_time = var.maintenance_window.start_time - end_time = var.maintenance_window.end_time - recurrence = var.maintenance_window.recurrence - } - - dynamic "web_server_network_access_control" { - for_each = var.web_server_allowed_ip_ranges == null ? [] : [1] - content { - dynamic "allowed_ip_range" { - for_each = var.web_server_allowed_ip_ranges - content { - value = allowed_ip_range.value.value - description = allowed_ip_range.value.description - } - } - } - } - - # allow the capability to set software overrides - dynamic "software_config" { - for_each = var.python_version != "" ? 
[ - { - airflow_config_overrides = var.airflow_config_overrides - env_variables = var.env_variables - image_version = var.image_version - pypi_packages = var.pypi_packages - }] : [] - content { - airflow_config_overrides = software_config.value["airflow_config_overrides"] - env_variables = software_config.value["env_variables"] - image_version = software_config.value["image_version"] - pypi_packages = software_config.value["pypi_packages"] - } - } - - encryption_config { - kms_key_name = data.google_kms_crypto_key.key.id - } - } - - depends_on = [ - module.vpc, - ] -} diff --git a/5-app-infra/4-service-catalog-repo/modules/composer/outputs.tf b/5-app-infra/4-service-catalog-repo/modules/composer/outputs.tf deleted file mode 100644 index 499525d6..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/composer/outputs.tf +++ /dev/null @@ -1,40 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -output "composer_env_name" { - value = google_composer_environment.cluster.name - description = "Name of the Cloud Composer Environment." -} - -output "composer_env_id" { - value = google_composer_environment.cluster.id - description = "ID of Cloud Composer Environment." -} - -output "gke_cluster" { - value = google_composer_environment.cluster.config.0.gke_cluster - description = "Google Kubernetes Engine cluster used to run the Cloud Composer Environment." 
-} - -output "gcs_bucket" { - value = google_composer_environment.cluster.config.0.dag_gcs_prefix - description = "Google Cloud Storage bucket which hosts DAGs for the Cloud Composer Environment." -} - -output "airflow_uri" { - value = google_composer_environment.cluster.config.0.airflow_uri - description = "URI of the Apache Airflow Web UI hosted within the Cloud Composer Environment." -} diff --git a/5-app-infra/4-service-catalog-repo/modules/composer/pipeline.tf b/5-app-infra/4-service-catalog-repo/modules/composer/pipeline.tf deleted file mode 100644 index 8258abf4..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/composer/pipeline.tf +++ /dev/null @@ -1,121 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -data "google_iam_policy" "serviceagent_secretAccessor" { - binding { - role = "roles/secretmanager.secretAccessor" - members = ["serviceAccount:service-${data.google_project.project.number}@gcp-sa-cloudbuild.iam.gserviceaccount.com"] - } -} - -resource "google_secret_manager_secret_iam_policy" "policy" { - project = data.google_secret_manager_secret.github_api_secret.project - secret_id = data.google_secret_manager_secret.github_api_secret.secret_id - policy_data = data.google_iam_policy.serviceagent_secretAccessor.policy_data -} - -resource "google_cloudbuildv2_connection" "repo_connect" { - provider = google-beta - project = data.google_project.project.project_id - location = var.region - name = "${var.github_name_prefix}-connection" - - github_config { - app_installation_id = var.github_app_installation_id - authorizer_credential { - oauth_token_secret_version = data.google_secret_manager_secret_version.github_api.id - } - } - depends_on = [google_secret_manager_secret_iam_policy.policy] -} - -resource "google_cloudbuildv2_repository" "repo" { - provider = google-beta - project = data.google_project.project.project_id - location = var.region - name = "${var.github_name_prefix}-repo" - parent_connection = google_cloudbuildv2_connection.repo_connect.id - remote_uri = var.github_remote_uri -} - -###### Added in but not used yet ######## -resource "google_service_account" "trigger_sa" { - account_id = "sa-apps-${local.sa_name}" - project = data.google_project.project.project_id - description = "Service account for Cloud Build in ${data.google_project.project.project_id}" -} -###### Added in but not used yet ######## -resource "google_service_account_iam_member" "trigger_sa_impersonate" { - service_account_id = google_service_account.trigger_sa.id - role = "roles/iam.serviceAccountTokenCreator" - member = "serviceAccount:${data.google_project.project.number}@cloudbuild.gserviceaccount.com" -} - -resource "google_cloudbuild_trigger" "zip_files" { - name = 
"zip-tf-files-trigger" - project = data.google_project.project.project_id - location = var.region - - repository_event_config { - repository = google_cloudbuildv2_repository.repo.id - push { - branch = "^${local.labels.environment}$" - } - } - build { - step { - id = "unshallow" - name = "gcr.io/cloud-builders/git" - secret_env = ["token"] - entrypoint = "/bin/bash" - args = [ - "-c", - "git fetch --unshallow https://$token@${local.github_repository}" - ] - - } - available_secrets { - secret_manager { - env = "token" - version_name = data.google_secret_manager_secret.github_api_secret.name - } - } - step { - id = "find-folders-affected-in-push" - name = "gcr.io/cloud-builders/gsutil" - entrypoint = "/bin/bash" - args = [ - "-c", - <<-EOT - changed_files=$(git diff $${COMMIT_SHA}^1 --name-only -r) - dags=$(echo "$changed_files" | xargs basename | sort | uniq ) - - for dag in $dags; do - echo "Found change in DAG: $dag" - (cd dags && zip /workspace/$dag.zip $dag) - done - EOT - ] - } - step { - id = "push-to-bucket" - name = "gcr.io/cloud-builders/gsutil" - args = ["cp", "/workspace/*.zip", "${google_composer_environment.cluster.config.0.dag_gcs_prefix}/"] - } - } - - depends_on = [google_composer_environment.cluster, google_cloudbuildv2_repository.repo] -} diff --git a/5-app-infra/4-service-catalog-repo/modules/composer/terraform.tfvars.example b/5-app-infra/4-service-catalog-repo/modules/composer/terraform.tfvars.example deleted file mode 100644 index 3d4db2c1..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/composer/terraform.tfvars.example +++ /dev/null @@ -1,29 +0,0 @@ -name = "isolated-composer-env10" -environment = "development" -project_id = "prj-c-composer" -cmek_project_id = "prj-c-bu3cmek-wo8w" -region = "us-central1" -maintenance_window = { - start_time = "2023-01-01T01:00:00Z" - end_time = "2023-01-01T13:00:00Z" - recurrence = "FREQ=WEEKLY;BYDAY=SU" -} -airflow_config_overrides = { "core-dags_are_paused_at_creation" = "True" } -env_variables = 
{ "EXAMPLE_VAR" = "value" } -image_version = "composer-2.5.2-airflow-2.6.3" - -web_server_allowed_ip_ranges = [ - { - value = "192.168.100.0/24" - description = "Office network" - }, - { - value = "192.168.101.0/24" - description = "Home network" - } -] - -github_name_prefix = "github-composer-cloudbuild" -github_app_installation_id = "APP_INSTALATION_ID_HERE" -github_api_token = "GITHUB_API_TOKEN_HERE" -github_remote_uri = "LINK_TO_GITHUB_REPO_CONTAINING_DAGS" diff --git a/5-app-infra/4-service-catalog-repo/modules/composer/variables.tf b/5-app-infra/4-service-catalog-repo/modules/composer/variables.tf deleted file mode 100644 index ea77a199..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/composer/variables.tf +++ /dev/null @@ -1,138 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -variable "name" { - type = string - description = "name of the Composer environment" -} - -variable "region" { - type = string - description = "The resource region, one of [us-central1, us-east4]." - default = "us-central1" - validation { - condition = contains(["us-central1", "us-east4"], var.region) - error_message = "Region must be one of [us-central1, us-east4]." - } -} - -variable "labels" { - type = map(string) - description = "The resource labels (a map of key/value pairs) to be applied to the Cloud Composer." 
- default = {} -} - -variable "maintenance_window" { - type = object({ - start_time = string - end_time = string - recurrence = string - }) - - description = "The configuration settings for Cloud Composer maintenance window." - - # Set Start time, Timezone, Days, and Length, so that combined time for the - # specified schedule is at least 12 hours in a 7-day rolling window. For example, - # a period of 4 hours every Monday, Wednesday, and Friday provides the required amount of time. - - # 12-hour maintenance window between 01:00 and 13:00 (UTC) on Sundays - default = { - start_time = "2021-01-01T01:00:00Z" - end_time = "2021-01-01T13:00:00Z" - recurrence = "FREQ=WEEKLY;BYDAY=SU" - } -} - -################################################ -# software_config # -################################################ -variable "airflow_config_overrides" { - type = map(string) - description = "Airflow configuration properties to override. Property keys contain the section and property names, separated by a hyphen, for example \"core-dags_are_paused_at_creation\"." - default = {} -} - -variable "env_variables" { - type = map(any) - description = "Additional environment variables to provide to the Apache Airflow scheduler, worker, and webserver processes. Environment variable names must match the regular expression [a-zA-Z_][a-zA-Z0-9_]*. They cannot specify Apache Airflow software configuration overrides (they cannot match the regular expression AIRFLOW__[A-Z0-9_]+__[A-Z0-9_]+), and they cannot match any of the following reserved names: [AIRFLOW_HOME,C_FORCE_ROOT,CONTAINER_NAME,DAGS_FOLDER,GCP_PROJECT,GCS_BUCKET,GKE_CLUSTER_NAME,SQL_DATABASE,SQL_INSTANCE,SQL_PASSWORD,SQL_PROJECT,SQL_REGION,SQL_USER]" - default = {} -} - -variable "image_version" { - type = string - description = "The version of the aiflow running in the cloud composer environment." 
- default = "composer-2.5.2-airflow-2.6.3" - validation { - condition = can(regex("^composer-([2-9]|[1-9][0-9]+)\\..*$", var.image_version)) - error_message = "The airflow_image_version must be GCP Composer version 2 or higher (e.g., composer-2.x.x-airflow-x.x.x)." - } -} - -variable "pypi_packages" { - type = map(string) - description = " Custom Python Package Index (PyPI) packages to be installed in the environment. Keys refer to the lowercase package name (e.g. \"numpy\")." - default = {} -} - -variable "python_version" { - description = "The default version of Python used to run the Airflow scheduler, worker, and webserver processes." - type = string - default = "3" -} - -variable "web_server_allowed_ip_ranges" { - description = "The network-level access control policy for the Airflow web server. If unspecified, no network-level access restrictions will be applied." - default = null - type = list(object({ - value = string - description = string - })) -} - -variable "github_remote_uri" { - description = "Url of your github repo" - type = string -} - -variable "github_name_prefix" { - description = "A name for your github connection to cloubuild" - type = string - default = "github-modules" -} - -variable "github_app_installation_id" { - description = "The app installation ID that was created when installing Google Cloud Build in Github: https://github.com/apps/google-cloud-build" - type = number - -} - -variable "service_account_prefix" { - description = "Name prefix to use for service accounts." - type = string - default = "sa" -} - -variable "project_id" { - description = "Optional project ID where Cloud Composer Environment is created." 
- type = string - default = null -} - -variable "github_secret_name" { - description = "Name of the github secret to extract github token info" - type = string - default = "github-api-token" -} diff --git a/5-app-infra/4-service-catalog-repo/modules/composer/vpc.tf b/5-app-infra/4-service-catalog-repo/modules/composer/vpc.tf deleted file mode 100644 index db633f28..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/composer/vpc.tf +++ /dev/null @@ -1,288 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -module "vpc" { - source = "terraform-google-modules/network/google" - version = "~> 8.1" - - project_id = data.google_project.project.project_id - network_name = local.network_name - routing_mode = "REGIONAL" - - subnets = [ - { - subnet_name = "composer-primary-use4" - subnet_ip = local.composer_node_use4 - subnet_region = "us-east4" - subnet_private_access = true - subnet_flow_logs = "true" - }, - { - subnet_name = "composer-primary-usc1" - subnet_ip = local.composer_node_usc1 - subnet_region = "us-central1" - subnet_private_access = true - subnet_flow_logs = "true" - } - ] - - secondary_ranges = { - composer-primary-use4 = [ - { - range_name = "pods-primary-use4" - ip_cidr_range = local.pods_use4 - }, - { - range_name = "composer-services-primary-use4" - ip_cidr_range = local.services_use4 - }, - ] - - composer-primary-usc1 = [ - { - range_name = "pods-primary-usc1" - ip_cidr_range = local.pods_usc1 - }, - { - range_name = "composer-services-primary-usc1" - ip_cidr_range = local.services_usc1 - } - ] - } - - ingress_rules = [ - { - name = "allow-internal" - description = "Allow internal traffic within the VPC" - source_tags = ["internal"] - destination_ranges = ["10.0.0.0/8"] - allow = [ - { - protocol = "tcp" - ports = ["0-65535"] - }, - { - protocol = "udp" - ports = ["0-65535"] - } - ] - log_config = { - metadata = "INCLUDE_ALL_METADATA" - } - }, - { - name = "iap-all-to-all" - description = "Allow support for IAP connections via google source ranges" - source_ranges = data.google_netblock_ip_ranges.iap_forwarders.cidr_blocks_ipv4 - allow = [ - { - protocol = "tcp" - ports = ["22"] - } - ] - log_config = { - metadata = "INCLUDE_ALL_METADATA" - } - }, - { - name = "health-check-google-to-all" - description = "Allow support for Health Check connections via google source ranges" - source_ranges = concat(data.google_netblock_ip_ranges.health_checkers.cidr_blocks_ipv4, data.google_netblock_ip_ranges.legacy_health_checkers.cidr_blocks_ipv4) - allow = [ - { 
- protocol = "tcp" - ports = ["80", "443"] - } - ] - log_config = { - metadata = "INCLUDE_ALL_METADATA" - } - } - ] - - egress_rules = [ - { - name = "egress-health-check-composer-to-google" - description = "Allow egress for Health Check connections from composer clusters" - target_tags = ["composer-use4", "composer-usc1"] - destination_ranges = concat(data.google_netblock_ip_ranges.health_checkers.cidr_blocks_ipv4, data.google_netblock_ip_ranges.legacy_health_checkers.cidr_blocks_ipv4) - allow = [ - { - protocol = "tcp" - ports = ["80", "443"] - } - ] - log_config = { - metadata = "INCLUDE_ALL_METADATA" - } - - }, - { - name = "composer-to-dns" - description = "Composer DNS access" - destination_ranges = [local.composer_node_usc1, local.composer_node_use4] - allow = [ - { - protocol = "tcp" - ports = ["53"] - }, - { - protocol = "udp" - ports = ["53"] - } - ] - log_config = { - metadata = "INCLUDE_ALL_METADATA" - } - }, - { - name = "composer-use4-node-to-node" - description = "Composer node to node all comms in USE4" - target_tags = ["composer-use4"] - destination_ranges = [local.composer_node_use4] - allow = [ - { - protocol = "all" - ports = [] - } - ] - log_config = { - metadata = "INCLUDE_ALL_METADATA" - } - }, - { - name = "composer-usc1-node-to-node" - description = "Composer node to node all comms in USC1" - target_tags = ["composer-usc1"] - destination_ranges = [local.composer_node_usc1] - allow = [ - { - protocol = "all" - ports = [] - } - ] - log_config = { - metadata = "INCLUDE_ALL_METADATA" - } - }, - { - name = "composer-use4-node-to-master" - description = "Composer node to master all comms in USE4" - target_tags = ["composer-use4"] - destination_ranges = [local.composer_master_use4] - allow = [ - { - protocol = "all" - ports = [] - } - ] - log_config = { - metadata = "INCLUDE_ALL_METADATA" - } - }, - { - name = "composer-usc1-node-to-master" - description = "Composer node to master all comms in USC1" - target_tags = ["composer-usc1"] - 
destination_ranges = [local.composer_master_usc1] - allow = [ - { - protocol = "all" - ports = [] - } - ] - log_config = { - metadata = "INCLUDE_ALL_METADATA" - } - }, - { - name = "composer-use4-to-webserver" - description = "Composer Nodes to Web Server in USE4" - target_tags = ["composer-use4"] - destination_ranges = [local.composer_webserver_use4] - allow = [ - { - protocol = "all" - ports = [] - } - ] - log_config = { - metadata = "INCLUDE_ALL_METADATA" - } - }, - { - name = "composer-usc1-to-webserver" - description = "Composer Nodes to Web Server in USC1" - target_tags = ["composer-usc1"] - destination_ranges = [local.composer_webserver_usc1] - allow = [ - { - protocol = "all" - ports = [] - } - ] - log_config = { - metadata = "INCLUDE_ALL_METADATA" - } - }, - { - name = "all-to-googleapi" - description = "Access for all resources in isolated VPC to Google APIs" - destination_ranges = ["199.36.153.8/30", "199.36.153.4/30"] - allow = [ - { - protocol = "tcp" - ports = ["443"] - } - ] - log_config = { - metadata = "INCLUDE_ALL_METADATA" - } - }, - { - name = "composer-v2-internal-egress" - description = "Access for all resources in isolated VPC to Google APIs" - target_tags = ["composer-usc1", "composer-use4"] - destination_ranges = [ - "10.0.0.0/8", - "172.16.0.0/12", - "192.168.0.0/16" - ] - allow = [ - { - protocol = "all" - } - ] - log_config = { - metadata = "INCLUDE_ALL_METADATA" - } - }, - { - name = "deny-all-egress" - description = "Default deny egress" - target_tags = ["composer-usc1", "composer-use4"] - destination_ranges = ["0.0.0.0/0"] - allow = [ - { - protocol = "all" - } - ] - log_config = { - metadata = "INCLUDE_ALL_METADATA" - } - } - ] -} diff --git a/5-app-infra/4-service-catalog-repo/modules/metadata/README.md b/5-app-infra/4-service-catalog-repo/modules/metadata/README.md deleted file mode 100644 index 08ca1098..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/metadata/README.md +++ /dev/null @@ -1,70 +0,0 @@ - -Copyright 2024 
Google LLC - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -## Requirements - -No requirements. - -## Providers - -| Name | Version | -|------|---------| -| [google](#provider\_google) | n/a | -| [google-beta](#provider\_google-beta) | n/a | - -## Modules - -No modules. - -## Resources - -| Name | Type | -|------|------| -| [google-beta_google_vertex_ai_metadata_store.store](https://registry.terraform.io/providers/hashicorp/google-beta/latest/docs/resources/google_vertex_ai_metadata_store) | resource | -| [google_kms_crypto_key.key](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_crypto_key) | data source | -| [google_kms_key_ring.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_key_ring) | data source | -| [google_project.project](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/project) | data source | -| [google_projects.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/projects) | data source | - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| [name](#input\_name) | The name of the metadata store instance | `string` | `null` | no | -| [project\_id](#input\_project\_id) | Optional Project ID. | `string` | `null` | no | -| [region](#input\_region) | The resource region, one of [us-central1, us-east4]. 
| `string` | `"us-central1"` | no | - -## Outputs - -| Name | Description | -|------|-------------| -| [vertex\_ai\_metadata\_store](#output\_vertex\_ai\_metadata\_store) | Vertex AI Metadata Store. | - - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| name | The name of the metadata store instance | `string` | `null` | no | -| project\_id | Optional Project ID. | `string` | `null` | no | -| region | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | - -## Outputs - -| Name | Description | -|------|-------------| -| vertex\_ai\_metadata\_store | Vertex AI Metadata Store. | - - diff --git a/5-app-infra/4-service-catalog-repo/modules/metadata/data.tf b/5-app-infra/4-service-catalog-repo/modules/metadata/data.tf deleted file mode 100644 index 035a39b9..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/metadata/data.tf +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -data "google_project" "project" { - project_id = var.project_id -} - -data "google_projects" "kms" { - filter = "labels.application_name:env-kms labels.environment:${data.google_project.project.labels.environment} lifecycleState:ACTIVE" -} - -data "google_kms_key_ring" "kms" { - name = local.keyring_name - location = var.region - project = data.google_projects.kms.projects.0.project_id -} - -data "google_kms_crypto_key" "key" { - name = data.google_project.project.name - key_ring = data.google_kms_key_ring.kms.id -} diff --git a/5-app-infra/4-service-catalog-repo/modules/metadata/locals.tf b/5-app-infra/4-service-catalog-repo/modules/metadata/locals.tf deleted file mode 100644 index 9a1fcf38..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/metadata/locals.tf +++ /dev/null @@ -1,19 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -locals { - keyring_name = "sample-keyring" -} diff --git a/5-app-infra/4-service-catalog-repo/modules/metadata/main.tf b/5-app-infra/4-service-catalog-repo/modules/metadata/main.tf deleted file mode 100644 index e9a77821..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/metadata/main.tf +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -resource "google_vertex_ai_metadata_store" "store" { - name = var.name - provider = google-beta - description = "Vertex Metadata store" - region = var.region - project = data.google_project.project.project_id - encryption_spec { - kms_key_name = data.google_kms_crypto_key.key.id - } -} diff --git a/5-app-infra/4-service-catalog-repo/modules/metadata/outputs.tf b/5-app-infra/4-service-catalog-repo/modules/metadata/outputs.tf deleted file mode 100644 index 6c9cd94f..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/metadata/outputs.tf +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -output "vertex_ai_metadata_store" { - description = "Vertex AI Metadata Store." 
- value = google_vertex_ai_metadata_store.store -} diff --git a/5-app-infra/4-service-catalog-repo/modules/metadata/variables.tf b/5-app-infra/4-service-catalog-repo/modules/metadata/variables.tf deleted file mode 100644 index 403910f3..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/metadata/variables.tf +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -variable "name" { - type = string - description = "The name of the metadata store instance" - default = null -} - -variable "region" { - type = string - description = "The resource region, one of [us-central1, us-east4]." - default = "us-central1" - validation { - condition = contains(["us-central1", "us-east4"], var.region) - error_message = "Region must be one of [us-central1, us-east4]." - } -} - -variable "project_id" { - type = string - description = "Optional Project ID." 
- default = null -} diff --git a/5-app-infra/4-service-catalog-repo/modules/notebook/README.md b/5-app-infra/4-service-catalog-repo/modules/notebook/README.md deleted file mode 100644 index aa6b2485..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/notebook/README.md +++ /dev/null @@ -1,137 +0,0 @@ -## Prerequisites - -#### IAM Permissions - -| Service Account | Scope | Role | -|-----------------|-------|------| -| PROJECT_NUMBER@cloudbuild.gserviceaccount.com | Project | Browser | -| | Project | Service Usage Consumer | -| | Project | Notebooks Admin | -| | Project | Compute Network Admin | -| | Project | Compute Security Admin | - -#### Organizational policies - -| Policy constraint | Scope | Value | -|-------------------|-------|-------| -| constraints/ainotebooks.requireAutoUpgradeSchedule | Project | Google-managed | -| constraints/ainotebooks.environmentOptions | Project | Google-managed | - - - -Copyright 2023 Google LLC - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -## Requirements - -No requirements. - -## Providers - -| Name | Version | -|------|---------| -| [google](#provider\_google) | 5.14.0 | - -## Modules - -No modules. 
- -## Resources - -| Name | Type | -|------|------| -| [google_workbench_instance.instance](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/workbench_instance) | resource | -| [google_compute_network.shared_vpc](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/compute_network) | data source | -| [google_compute_subnetwork.subnet](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/compute_subnetwork) | data source | -| [google_kms_crypto_key.key](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_crypto_key) | data source | -| [google_kms_key_ring.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_key_ring) | data source | -| [google_netblock_ip_ranges.health_checkers](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/netblock_ip_ranges) | data source | -| [google_netblock_ip_ranges.iap_forwarders](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/netblock_ip_ranges) | data source | -| [google_netblock_ip_ranges.legacy_health_checkers](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/netblock_ip_ranges) | data source | -| [google_project.project](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/project) | data source | -| [google_projects.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/projects) | data source | -| [google_projects.vpc](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/projects) | data source | - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| [accelerator\_type](#input\_accelerator\_type) | The type of accelerator to use | `string` | `"NVIDIA_TESLA_K80"` | no | -| 
[boot\_disk\_size\_gb](#input\_boot\_disk\_size\_gb) | (Optional) The size of the boot disk in GB attached to this instance, up to a maximum of 64000 GB (64 TB) | `string` | `"100"` | no | -| [boot\_disk\_type](#input\_boot\_disk\_type) | Possible disk types for notebook instances | `string` | `"PD_SSD"` | no | -| [boundry\_code](#input\_boundry\_code) | The boundry code for the tenant | `string` | `"001"` | no | -| [core\_count](#input\_core\_count) | number of accelerators to use | `number` | `1` | no | -| [data\_disk\_size\_gb](#input\_data\_disk\_size\_gb) | (Optional) The size of the data disk in GB attached to this instance, up to a maximum of 64000 GB (64 TB) | `string` | `"100"` | no | -| [data\_disk\_type](#input\_data\_disk\_type) | Optional. Input only. Indicates the type of the disk. Possible values are: PD\_STANDARD, PD\_SSD, PD\_BALANCED, PD\_EXTREME. | `string` | `"PD_SSD"` | no | -| [disable\_proxy\_access](#input\_disable\_proxy\_access) | (Optional) The notebook instance will not register with the proxy | `bool` | `false` | no | -| [image\_family](#input\_image\_family) | Use this VM image family to find the image; the newest image in this family will be used. | `string` | `"workbench-instances"` | no | -| [image\_name](#input\_image\_name) | Use VM image name to find the image. | `string` | `""` | no | -| [image\_project](#input\_image\_project) | The name of the Google Cloud project that this VM image belongs to. Format: projects/{project\_id} | `string` | `"cloud-notebooks-managed"` | no | -| [install\_gpu\_driver](#input\_install\_gpu\_driver) | Whether the end user authorizes Google Cloud to install GPU driver on this instance. Only applicable to instances with GPUs. | `bool` | `false` | no | -| [instance\_owners](#input\_instance\_owners) | email of the owner of the instance, e.g. alias@example.com. Only one owner is supported! | `set(string)` | n/a | yes | -| [location](#input\_location) | Notebook instance location (zone). 
| `string` | `"us-central1-a"` | no | -| [machine\_type](#input\_machine\_type) | type of the machine to spin up for the notebook | `string` | `"e2-standard-4"` | no | -| [name](#input\_name) | name of the notebook instance | `string` | n/a | yes | -| [project\_id](#input\_project\_id) | Optional Project ID. | `string` | `null` | no | -| [tags](#input\_tags) | The Compute Engine tags to add to instance. | `list(string)` |
[
"egress-internet"
]
| no | - -## Outputs - -| Name | Description | -|------|-------------| -| [create\_time](#output\_create\_time) | Instance creation time | -| [effective\_labels](#output\_effective\_labels) | All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services. | -| [id](#output\_id) | an identifier for the resource with format projects/{{project}}/locations/{{location}}/instances/{{name}} | -| [proxy\_uri](#output\_proxy\_uri) | The proxy endpoint that is used to access the Jupyter notebook. Only returned when the resource is in a PROVISIONED state. If needed you can utilize terraform apply -refresh-only to await the population of this value. | -| [state](#output\_state) | The state of this instance. | -| [terraform\_labels](#output\_terraform\_labels) | The combination of labels configured directly on the resource and default labels configured on the provider. | -| [update\_time](#output\_update\_time) | Instance update time. | - - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| accelerator\_type | The type of accelerator to use | `string` | `"NVIDIA_TESLA_K80"` | no | -| boot\_disk\_size\_gb | (Optional) The size of the boot disk in GB attached to this instance, up to a maximum of 64000 GB (64 TB) | `string` | `"100"` | no | -| boot\_disk\_type | Possible disk types for notebook instances | `string` | `"PD_SSD"` | no | -| boundry\_code | The boundry code for the tenant | `string` | `"001"` | no | -| core\_count | number of accelerators to use | `number` | `1` | no | -| data\_disk\_size\_gb | (Optional) The size of the data disk in GB attached to this instance, up to a maximum of 64000 GB (64 TB) | `string` | `"100"` | no | -| data\_disk\_type | Optional. Input only. Indicates the type of the disk. Possible values are: PD\_STANDARD, PD\_SSD, PD\_BALANCED, PD\_EXTREME. 
| `string` | `"PD_SSD"` | no | -| disable\_proxy\_access | (Optional) The notebook instance will not register with the proxy | `bool` | `false` | no | -| image\_family | Use this VM image family to find the image; the newest image in this family will be used. | `string` | `"workbench-instances"` | no | -| image\_name | Use VM image name to find the image. | `string` | `""` | no | -| image\_project | The name of the Google Cloud project that this VM image belongs to. Format: projects/{project\_id} | `string` | `"cloud-notebooks-managed"` | no | -| install\_gpu\_driver | Whether the end user authorizes Google Cloud to install GPU driver on this instance. Only applicable to instances with GPUs. | `bool` | `false` | no | -| instance\_owners | email of the owner of the instance, e.g. alias@example.com. Only one owner is supported! | `set(string)` | n/a | yes | -| location | Notebook instance location (zone). | `string` | `"us-central1-a"` | no | -| machine\_type | type of the machine to spin up for the notebook | `string` | `"e2-standard-4"` | no | -| name | name of the notebook instance | `string` | n/a | yes | -| project\_id | Optional Project ID. | `string` | `null` | no | -| tags | The Compute Engine tags to add to instance. | `list(string)` |
[
"egress-internet"
]
| no | - -## Outputs - -| Name | Description | -|------|-------------| -| create\_time | Instance creation time | -| effective\_labels | All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services. | -| id | an identifier for the resource with format projects/{{project}}/locations/{{location}}/instances/{{name}} | -| proxy\_uri | The proxy endpoint that is used to access the Jupyter notebook. Only returned when the resource is in a PROVISIONED state. If needed you can utilize terraform apply -refresh-only to await the population of this value. | -| state | The state of this instance. | -| terraform\_labels | The combination of labels configured directly on the resource and default labels configured on the provider. | -| update\_time | Instance update time. | - - diff --git a/5-app-infra/4-service-catalog-repo/modules/notebook/data.tf b/5-app-infra/4-service-catalog-repo/modules/notebook/data.tf deleted file mode 100644 index 64654ba2..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/notebook/data.tf +++ /dev/null @@ -1,65 +0,0 @@ -/** - * Copyright 2023 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -data "google_project" "project" { - project_id = var.project_id -} - -data "google_projects" "kms" { - filter = "labels.application_name:env-kms labels.environment:${data.google_project.project.labels.environment} lifecycleState:ACTIVE" -} - -data "google_projects" "vpc" { - filter = "labels.application_name:restricted-shared-vpc-host labels.environment:${data.google_project.project.labels.environment} lifecycleState:ACTIVE" - # filter = "labels.application_name:base-shared-vpc-host labels.environment:${data.google_project.project.labels.environment} lifecycleState:ACTIVE" -} - -data "google_compute_network" "shared_vpc" { - name = "vpc-${data.google_project.project.labels.env_code}-shared-restricted" - # name = "vpc-${data.google_project.project.labels.env_code}-shared-base" - project = data.google_projects.vpc.projects.0.project_id -} - -data "google_compute_subnetwork" "subnet" { - name = "sb-${data.google_project.project.labels.env_code}-shared-restricted-${local.region}" - # name = "sb-${data.google_project.project.labels.env_code}-shared-base-${local.region}" - project = data.google_projects.vpc.projects.0.project_id - region = local.region -} - -data "google_kms_key_ring" "kms" { - name = "sample-keyring" - location = local.region - project = data.google_projects.kms.projects.0.project_id -} - -data "google_kms_crypto_key" "key" { - name = data.google_project.project.name - key_ring = data.google_kms_key_ring.kms.id -} - -data "google_netblock_ip_ranges" "legacy_health_checkers" { - range_type = "legacy-health-checkers" -} - -data "google_netblock_ip_ranges" "health_checkers" { - range_type = "health-checkers" -} - -// Cloud IAP's TCP forwarding netblock -data "google_netblock_ip_ranges" "iap_forwarders" { - range_type = "iap-forwarders" -} diff --git a/5-app-infra/4-service-catalog-repo/modules/notebook/locals.tf b/5-app-infra/4-service-catalog-repo/modules/notebook/locals.tf deleted file mode 100644 index cdfed661..00000000 --- 
a/5-app-infra/4-service-catalog-repo/modules/notebook/locals.tf +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Copyright 2023 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -locals { - name_var = format("%s-%s", data.google_project.project.labels.env_code, var.name) - region = substr(var.location, 0, length(var.location) - 2) -} diff --git a/5-app-infra/4-service-catalog-repo/modules/notebook/main.tf b/5-app-infra/4-service-catalog-repo/modules/notebook/main.tf deleted file mode 100644 index 6686081e..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/notebook/main.tf +++ /dev/null @@ -1,95 +0,0 @@ -/** - * Copyright 2023 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -resource "google_workbench_instance" "instance" { - name = var.name - location = var.location - - gce_setup { - machine_type = var.machine_type - - dynamic "accelerator_configs" { - for_each = var.install_gpu_driver == true ? 
[1] : [] - content { - type = var.accelerator_type - core_count = var.core_count - } - } - disable_public_ip = true - - - dynamic "vm_image" { - for_each = var.image_family != "" ? [1] : [] - content { - project = var.image_project - family = var.image_family - } - } - - dynamic "vm_image" { - for_each = var.image_name != "" ? [1] : [] - content { - project = var.image_project - name = var.image_name - } - } - - boot_disk { - disk_type = var.boot_disk_type - disk_size_gb = var.boot_disk_size_gb - disk_encryption = "CMEK" - kms_key = data.google_kms_crypto_key.key.id - } - - data_disks { - disk_size_gb = var.data_disk_size_gb - disk_type = var.data_disk_type - disk_encryption = "CMEK" - kms_key = data.google_kms_crypto_key.key.id - } - - enable_ip_forwarding = false - - tags = var.tags - - network_interfaces { - network = data.google_compute_network.shared_vpc.id - subnet = data.google_compute_subnetwork.subnet.id - nic_type = "GVNIC" - } - - metadata = { - notebook-disable-downloads = "true" - notebook-disable-root = "true" - notebook-disable-terminal = "true" - notebook-upgrade-schedule = "00 19 * * MON" - # disable-mixer = "${var.dataproc_kernel_access ? 
false : true}" - disable-mixer = "false" // Enable access to Dataproc kernels - report-dns-resolution = "true" - report-event-health = "true" - terraform = "true" - } - } - - instance_owners = var.instance_owners - - disable_proxy_access = var.disable_proxy_access - - labels = { - environment = data.google_project.project.labels.environment - boundry_code = var.boundry_code - } -} diff --git a/5-app-infra/4-service-catalog-repo/modules/notebook/outputs.tf b/5-app-infra/4-service-catalog-repo/modules/notebook/outputs.tf deleted file mode 100644 index 77ab019a..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/notebook/outputs.tf +++ /dev/null @@ -1,50 +0,0 @@ -/** - * Copyright 2023 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -output "id" { - description = "an identifier for the resource with format projects/{{project}}/locations/{{location}}/instances/{{name}}" - value = google_workbench_instance.instance.id -} - -output "proxy_uri" { - description = "The proxy endpoint that is used to access the Jupyter notebook. Only returned when the resource is in a PROVISIONED state. If needed you can utilize terraform apply -refresh-only to await the population of this value." - value = google_workbench_instance.instance.proxy_uri -} - -output "state" { - description = "The state of this instance." 
- value = google_workbench_instance.instance.state -} - -output "create_time" { - description = "Instance creation time" - value = google_workbench_instance.instance.create_time -} - -output "update_time" { - description = "Instance update time." - value = google_workbench_instance.instance.update_time -} - -output "terraform_labels" { - description = "The combination of labels configured directly on the resource and default labels configured on the provider." - value = google_workbench_instance.instance.terraform_labels -} - -output "effective_labels" { - description = "All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services." - value = google_workbench_instance.instance.effective_labels -} diff --git a/5-app-infra/4-service-catalog-repo/modules/notebook/variables.tf b/5-app-infra/4-service-catalog-repo/modules/notebook/variables.tf deleted file mode 100644 index c8aa2718..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/notebook/variables.tf +++ /dev/null @@ -1,150 +0,0 @@ -/** - * Copyright 2023 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -variable "name" { - description = "name of the notebook instance" - type = string -} - -variable "location" { - type = string - description = "Notebook instance location (zone)." 
- default = "us-central1-a" - validation { - condition = contains(["us-central1", "us-east4"], substr(var.location, 0, length(var.location) - 2)) - error_message = "Location must be one within of [us-central1, us-east4] regions." - } -} - -variable "machine_type" { - description = "type of the machine to spin up for the notebook" - type = string - default = "e2-standard-4" -} - -variable "instance_owners" { - description = "email of the owner of the instance, e.g. alias@example.com. Only one owner is supported!" - type = set(string) -} - - -variable "accelerator_type" { - description = "The type of accelerator to use" - type = string - default = "NVIDIA_TESLA_K80" - validation { - condition = contains(["ACCELERATOR_TYPE_UNSPECIFIED", "NVIDIA_TESLA_K80", - "NVIDIA_TESLA_P100", "NVIDIA_TESLA_V100", "NVIDIA_TESLA_P4", - "NVIDIA_TESLA_T4", "NVIDIA_TESLA_T4_VWS", "NVIDIA_TESLA_P100_VWS", - "NVIDIA_TESLA_P4_VWS", "NVIDIA_TESLA_A100", "TPU_V2", "TPU_V3"], var.accelerator_type) - error_message = "Accelerator type can be one of the following: " - } -} -variable "core_count" { - type = number - default = 1 - description = "number of accelerators to use" -} - -variable "image_project" { - description = "The name of the Google Cloud project that this VM image belongs to. Format: projects/{project_id}" - type = string - # default = "deeplearning-platform-release" - default = "cloud-notebooks-managed" -} - -variable "image_family" { - description = "Use this VM image family to find the image; the newest image in this family will be used." - type = string - # default = "common-cpu-notebooks" - default = "workbench-instances" -} - -variable "image_name" { - description = "Use VM image name to find the image." - type = string - default = "" -} - -variable "install_gpu_driver" { - description = "Whether the end user authorizes Google Cloud to install GPU driver on this instance. Only applicable to instances with GPUs." 
- type = bool - default = false -} - -variable "boot_disk_type" { - description = "Possible disk types for notebook instances" - type = string - default = "PD_SSD" - validation { - condition = contains(["DISK_TYPE_UNSPECIFIED", "PD_STANDARD", "PD_SSD", "PD_BALANCED", "PD_EXTREME"], var.boot_disk_type) - error_message = "Illegal value for boot disk type" - } -} - -variable "boot_disk_size_gb" { - description = "(Optional) The size of the boot disk in GB attached to this instance, up to a maximum of 64000 GB (64 TB)" - type = string - default = "100" -} - -variable "data_disk_type" { - description = "Optional. Input only. Indicates the type of the disk. Possible values are: PD_STANDARD, PD_SSD, PD_BALANCED, PD_EXTREME." - type = string - default = "PD_SSD" - validation { - condition = contains(["PD_STANDARD", "PD_SSD", "PD_BALANCED", "PD_EXTREME"], var.data_disk_type) - error_message = "Illegal value for data disk type" - } -} - -variable "data_disk_size_gb" { - description = "(Optional) The size of the data disk in GB attached to this instance, up to a maximum of 64000 GB (64 TB)" - type = string - default = "100" -} - - -variable "disable_proxy_access" { - description = "(Optional) The notebook instance will not register with the proxy" - type = bool - default = false -} - -# variable "dataproc_kernel_access" { -# description = "(Optional) Enables access to Dataproc kernels." -# type = bool -# default = true -# } - -variable "boundry_code" { - description = "The boundry code for the tenant" - type = string - default = "001" -} - - -variable "project_id" { - type = string - description = "Optional Project ID." - default = null -} - -variable "tags" { - type = list(string) - description = "The Compute Engine tags to add to instance." 
- default = ["egress-internet"] -} diff --git a/5-app-infra/4-service-catalog-repo/modules/pubsub/README.md b/5-app-infra/4-service-catalog-repo/modules/pubsub/README.md deleted file mode 100644 index 1aa923b9..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/pubsub/README.md +++ /dev/null @@ -1,82 +0,0 @@ - -Copyright 2024 Google LLC - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -## Requirements - -No requirements. - -## Providers - -| Name | Version | -|------|---------| -| [google](#provider\_google) | n/a | -| [google-beta](#provider\_google-beta) | n/a | - -## Modules - -No modules. 
- -## Resources - -| Name | Type | -|------|------| -| [google-beta_google_pubsub_topic.pubsub_topic](https://registry.terraform.io/providers/hashicorp/google-beta/latest/docs/resources/google_pubsub_topic) | resource | -| [google_kms_crypto_key.key](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_crypto_key) | data source | -| [google_kms_key_ring.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_key_ring) | data source | -| [google_project.project](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/project) | data source | -| [google_projects.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/projects) | data source | - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| [locked\_regions](#input\_locked\_regions) | Regions that pubsub presistence is locked to | `list(any)` |
[
"us-central1",
"us-east4"
]
| no | -| [message\_retention\_duration](#input\_message\_retention\_duration) | Message retention duration. | `string` | `"86400s"` | no | -| [project\_id](#input\_project\_id) | Optional Project ID. | `string` | `null` | no | -| [region](#input\_region) | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | -| [topic\_name](#input\_topic\_name) | Topic name | `string` | n/a | yes | - -## Outputs - -| Name | Description | -|------|-------------| -| [pubsub\_topic](#output\_pubsub\_topic) | Pub/Sub Topic. | - - -## Security Controls - -The following table outlines which of the suggested controls for Vertex Generative AI are enabled in this module. -| Name | Control ID | NIST 800-53 | CRI Profile | Category | Source Blueprint -|------|------------|-------------|-------------|----------| ----------------| -|Customer Managed Encryption Keys for Pub/Sub Messages| PS-CO-6.1| SC-12
SC-13| PR.DS-1.1
PR.DS-1.2
PR.DS-2.1
PR.DS-2.2
PR.DS-5.1 | Recommended | Secure Foundation v4 -|Configure Message Storage Policies | PS-CO-4.1 | AC-3
AC-17
AC-20
| PR.AC-3.1
PR.AC-3.2
PR.AC-4.1
PR.AC-4.2
PR.AC-4.3
PR.AC-6.1
PR.PT-3.1
PR.PT-4.1 | Optional | ML Foundation v0.1.0-alpha.1 - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| locked\_regions | Regions that pubsub presistence is locked to | `list(any)` |
[
"us-central1",
"us-east4"
]
| no | -| message\_retention\_duration | Message retention duration. | `string` | `"86400s"` | no | -| project\_id | Optional Project ID. | `string` | `null` | no | -| region | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | -| topic\_name | Topic name | `string` | n/a | yes | - -## Outputs - -| Name | Description | -|------|-------------| -| pubsub\_topic | Pub/Sub Topic. | - - diff --git a/5-app-infra/4-service-catalog-repo/modules/pubsub/data.tf b/5-app-infra/4-service-catalog-repo/modules/pubsub/data.tf deleted file mode 100644 index 5ec06b5d..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/pubsub/data.tf +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -data "google_project" "project" { - project_id = var.project_id -} - -data "google_projects" "kms" { - filter = "labels.application_name:env-kms labels.environment:${data.google_project.project.labels.environment} lifecycleState:ACTIVE" -} - -data "google_kms_key_ring" "kms" { - name = local.keyring_name - location = var.region - project = data.google_projects.kms.projects.0.project_id -} - -data "google_kms_crypto_key" "key" { - name = data.google_project.project.name - key_ring = data.google_kms_key_ring.kms.id -} - diff --git a/5-app-infra/4-service-catalog-repo/modules/pubsub/locals.tf b/5-app-infra/4-service-catalog-repo/modules/pubsub/locals.tf deleted file mode 100644 index f92bce3f..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/pubsub/locals.tf +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -locals { - keyring_name = "sample-keyring" -} - diff --git a/5-app-infra/4-service-catalog-repo/modules/pubsub/main.tf b/5-app-infra/4-service-catalog-repo/modules/pubsub/main.tf deleted file mode 100644 index 329f0ca4..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/pubsub/main.tf +++ /dev/null @@ -1,39 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -resource "google_pubsub_topic" "pubsub_topic" { - provider = google-beta - - project = data.google_project.project.project_id - name = var.topic_name - message_retention_duration = var.message_retention_duration - - #Customer Managed Encryption Keys - #Control ID: PS-CO-6.1 - #NIST 800-53: SC-12 SC-13 - #CRI Profile: PR.DS-1.1 PR.DS-1.2 PR.DS-2.1 PR.DS-2.2 PR.DS-5.1 - - kms_key_name = data.google_kms_crypto_key.key.id - - #Configure Message Storage Policies - #Control ID: PS-CO-4.1 - #NIST 800-53: AC-3 AC-17 AC-20 - #CRI Profile: PR.AC-3.1 PR.AC-3.2 PR.AC-4.1 PR.AC-4.2 PR.AC-4.3 PR.AC-6.1 PR.PT-3.1 PR.PT-4.1 - - message_storage_policy { - allowed_persistence_regions = var.locked_regions - } -} diff --git a/5-app-infra/4-service-catalog-repo/modules/pubsub/outputs.tf b/5-app-infra/4-service-catalog-repo/modules/pubsub/outputs.tf deleted file mode 100644 index 5606922c..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/pubsub/outputs.tf +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -output "pubsub_topic" { - description = "Pub/Sub Topic." - value = google_pubsub_topic.pubsub_topic -} diff --git a/5-app-infra/4-service-catalog-repo/modules/pubsub/variables.tf b/5-app-infra/4-service-catalog-repo/modules/pubsub/variables.tf deleted file mode 100644 index 8101d407..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/pubsub/variables.tf +++ /dev/null @@ -1,52 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -variable "project_id" { - type = string - description = "Optional Project ID." - default = null -} - -variable "message_retention_duration" { - description = "Message retention duration." - default = "86400s" - type = string -} - -variable "region" { - type = string - description = "The resource region, one of [us-central1, us-east4]." - default = "us-central1" - validation { - condition = contains(["us-central1", "us-east4"], var.region) - error_message = "Region must be one of [us-central1, us-east4]." 
- } -} - -variable "topic_name" { - description = "Topic name" - type = string -} - -variable "locked_regions" { - description = "Regions that pubsub presistence is locked to" - type = list(any) - default = [ - "us-central1", - "us-east4" - ] -} - diff --git a/5-app-infra/4-service-catalog-repo/modules/secrets/README.md b/5-app-infra/4-service-catalog-repo/modules/secrets/README.md deleted file mode 100644 index d003b9b8..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/secrets/README.md +++ /dev/null @@ -1,79 +0,0 @@ - -Copyright 2024 Google LLC - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -## Requirements - -No requirements. - -## Providers - -| Name | Version | -|------|---------| -| [google](#provider\_google) | n/a | - -## Modules - -No modules. 
- -## Resources - -| Name | Type | -|------|------| -| [google_secret_manager_secret.secret](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/secret_manager_secret) | resource | -| [google_kms_crypto_key.key](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_crypto_key) | data source | -| [google_kms_key_ring.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_key_ring) | data source | -| [google_project.project](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/project) | data source | -| [google_projects.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/projects) | data source | -| [google_pubsub_topic.secret_rotations](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/pubsub_topic) | data source | - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| [project\_id](#input\_project\_id) | Optional Project ID. | `string` | `null` | no | -| [region](#input\_region) | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | -| [secret\_names](#input\_secret\_names) | Names of the secrets to be created. | `list(string)` | n/a | yes | - -## Outputs - -| Name | Description | -|------|-------------| -| [secret\_manager](#output\_secret\_manager) | Secret Manager resource. | - - -## Security Controls - -The following table outlines which of the suggested controls for Vertex Generative AI are enabled in this module. -| Name | Control ID | NIST 800-53 | CRI Profile | Category | Source Blueprint -|------|------------|-------------|-------------|----------| ----------------| -|Customer Managed Encryption Keys| COM-CO-2.3| SC-12
SC-13 | PR.DS-1.1
PR.DS-2.1
PR.DS-2.2
PR.DS-5.1 | Recommended | Secure Foundation v4 -|Automatic Secret Replication| SM-CO-6.1| SC-12
SC-13| None | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 -|Set up Automatic Rotation of Secrets| SM-CO-6.2| SC-12
SC-13| None | Recommended based on customer use case | ML Foundation v0.1.0-alpha.1 - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| project\_id | Optional Project ID. | `string` | `null` | no | -| region | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | -| secret\_names | Names of the secrets to be created. | `list(string)` | n/a | yes | - -## Outputs - -| Name | Description | -|------|-------------| -| secret\_manager | Secret Manager resource. | - - diff --git a/5-app-infra/4-service-catalog-repo/modules/secrets/data.tf b/5-app-infra/4-service-catalog-repo/modules/secrets/data.tf deleted file mode 100644 index b243d811..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/secrets/data.tf +++ /dev/null @@ -1,39 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -data "google_project" "project" { - project_id = var.project_id -} - -data "google_projects" "kms" { - filter = "labels.application_name:env-kms labels.environment:${data.google_project.project.labels.environment} lifecycleState:ACTIVE" -} - -data "google_kms_key_ring" "kms" { - name = local.keyring_name - location = var.region - project = data.google_projects.kms.projects.0.project_id -} - -data "google_kms_crypto_key" "key" { - name = data.google_project.project.name - key_ring = data.google_kms_key_ring.kms.id -} - -data "google_pubsub_topic" "secret_rotations" { - name = local.pubsub_topic_name - project = data.google_project.project.project_id -} diff --git a/5-app-infra/4-service-catalog-repo/modules/secrets/locals.tf b/5-app-infra/4-service-catalog-repo/modules/secrets/locals.tf deleted file mode 100644 index 5defd111..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/secrets/locals.tf +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -locals { - keyring_name = "sample-keyring" - pubsub_topic_name = "secret-rotation-notifications" -} diff --git a/5-app-infra/4-service-catalog-repo/modules/secrets/main.tf b/5-app-infra/4-service-catalog-repo/modules/secrets/main.tf deleted file mode 100644 index c20104c1..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/secrets/main.tf +++ /dev/null @@ -1,56 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -resource "google_secret_manager_secret" "secret" { - for_each = toset(var.secret_names) - secret_id = each.key - - project = data.google_project.project.project_id - - #Set up Automatic Rotation of Secrets - #Control ID: SM-CO-6.2 - #NIST 800-53: SC-12 SC-13 - - rotation { - next_rotation_time = formatdate("YYYY-MM-DD'T'hh:mm:ss'Z'", timeadd(timestamp(), "720h")) - rotation_period = "43200s" - } - - topics { - name = data.google_pubsub_topic.secret_rotations.id - } - - #Automatic Secret Replication - #Control ID: SM-CO-6.1 - #NIST 800-53: SC-12 SC-13 - - replication { - user_managed { - replicas { - location = data.google_kms_key_ring.kms.location - - #Customer Managed Encryption Keys - #Control ID: COM-CO-2.3 - #NIST 800-53: SC-12 SC-13 - #CRI Profile: PR.DS-1.1 PR.DS-1.2 PR.DS-2.1 PR.DS-2.2 PR.DS-5.1 - - customer_managed_encryption { - kms_key_name = data.google_kms_crypto_key.key.id - } - } - } - } -} diff --git a/5-app-infra/4-service-catalog-repo/modules/secrets/outputs.tf 
b/5-app-infra/4-service-catalog-repo/modules/secrets/outputs.tf deleted file mode 100644 index 6ad27497..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/secrets/outputs.tf +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -output "secret_manager" { - description = "Secret Manager resource." - value = google_secret_manager_secret.secret -} diff --git a/5-app-infra/4-service-catalog-repo/modules/secrets/variables.tf b/5-app-infra/4-service-catalog-repo/modules/secrets/variables.tf deleted file mode 100644 index 517c8158..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/secrets/variables.tf +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -variable "project_id" { - type = string - description = "Optional Project ID." 
- default = null -} - -variable "region" { - type = string - description = "The resource region, one of [us-central1, us-east4]." - default = "us-central1" - validation { - condition = contains(["us-central1", "us-east4"], var.region) - error_message = "Region must be one of [us-central1, us-east4]." - } -} - -variable "secret_names" { - type = list(string) - description = "Names of the secrets to be created." -} diff --git a/5-app-infra/4-service-catalog-repo/modules/tensorboard/README.md b/5-app-infra/4-service-catalog-repo/modules/tensorboard/README.md deleted file mode 100644 index 9716b12b..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/tensorboard/README.md +++ /dev/null @@ -1,69 +0,0 @@ - -Copyright 2024 Google LLC - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -## Requirements - -No requirements. - -## Providers - -| Name | Version | -|------|---------| -| [google](#provider\_google) | n/a | - -## Modules - -No modules. 
- -## Resources - -| Name | Type | -|------|------| -| [google_vertex_ai_tensorboard.tensorboard](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/vertex_ai_tensorboard) | resource | -| [google_kms_crypto_key.key](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_crypto_key) | data source | -| [google_kms_key_ring.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/kms_key_ring) | data source | -| [google_project.project](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/project) | data source | -| [google_projects.kms](https://registry.terraform.io/providers/hashicorp/google/latest/docs/data-sources/projects) | data source | - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| [name](#input\_name) | The name of the metadata store instance | `string` | `null` | no | -| [project\_id](#input\_project\_id) | Optional Project ID. | `string` | `null` | no | -| [region](#input\_region) | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | - -## Outputs - -| Name | Description | -|------|-------------| -| [vertex\_ai\_tensorboard](#output\_vertex\_ai\_tensorboard) | TensorBoard resource. | - - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| name | The name of the metadata store instance | `string` | `null` | no | -| project\_id | Optional Project ID. | `string` | `null` | no | -| region | The resource region, one of [us-central1, us-east4]. | `string` | `"us-central1"` | no | - -## Outputs - -| Name | Description | -|------|-------------| -| vertex\_ai\_tensorboard | TensorBoard resource. 
| - - diff --git a/5-app-infra/4-service-catalog-repo/modules/tensorboard/data.tf b/5-app-infra/4-service-catalog-repo/modules/tensorboard/data.tf deleted file mode 100644 index 881d3bbe..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/tensorboard/data.tf +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - - -data "google_project" "project" { - project_id = var.project_id -} - -data "google_projects" "kms" { - filter = "labels.application_name:env-kms labels.environment:${data.google_project.project.labels.environment} lifecycleState:ACTIVE" -} - -data "google_kms_key_ring" "kms" { - name = local.keyring_name - location = var.region - project = data.google_projects.kms.projects.0.project_id -} - -data "google_kms_crypto_key" "key" { - name = data.google_project.project.name - key_ring = data.google_kms_key_ring.kms.id -} - diff --git a/5-app-infra/4-service-catalog-repo/modules/tensorboard/locals.tf b/5-app-infra/4-service-catalog-repo/modules/tensorboard/locals.tf deleted file mode 100644 index 9a1fcf38..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/tensorboard/locals.tf +++ /dev/null @@ -1,19 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -locals { - keyring_name = "sample-keyring" -} diff --git a/5-app-infra/4-service-catalog-repo/modules/tensorboard/main.tf b/5-app-infra/4-service-catalog-repo/modules/tensorboard/main.tf deleted file mode 100644 index 9b4ace46..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/tensorboard/main.tf +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -resource "google_vertex_ai_tensorboard" "tensorboard" { - display_name = var.name - description = "Vertex tensorboard instance with cmek" - labels = {} - region = var.region - project = data.google_project.project.project_id - encryption_spec { - kms_key_name = data.google_kms_crypto_key.key.id - } -} diff --git a/5-app-infra/4-service-catalog-repo/modules/tensorboard/outputs.tf b/5-app-infra/4-service-catalog-repo/modules/tensorboard/outputs.tf deleted file mode 100644 index 36691c0d..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/tensorboard/outputs.tf +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -output "vertex_ai_tensorboard" { - description = "TensorBoard resource." - value = google_vertex_ai_tensorboard.tensorboard -} diff --git a/5-app-infra/4-service-catalog-repo/modules/tensorboard/variables.tf b/5-app-infra/4-service-catalog-repo/modules/tensorboard/variables.tf deleted file mode 100644 index 403910f3..00000000 --- a/5-app-infra/4-service-catalog-repo/modules/tensorboard/variables.tf +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -variable "name" { - type = string - description = "The name of the metadata store instance" - default = null -} - -variable "region" { - type = string - description = "The resource region, one of [us-central1, us-east4]." - default = "us-central1" - validation { - condition = contains(["us-central1", "us-east4"], var.region) - error_message = "Region must be one of [us-central1, us-east4]." - } -} - -variable "project_id" { - type = string - description = "Optional Project ID." - default = null -} diff --git a/5-app-infra/5-vpc-sc/README.md b/5-app-infra/5-vpc-sc/README.md deleted file mode 100644 index 88fd2d0d..00000000 --- a/5-app-infra/5-vpc-sc/README.md +++ /dev/null @@ -1,175 +0,0 @@ -# 5-app-infra - -This repo is part of a multi-part guide that shows how to configure and deploy -the example.com reference architecture described in -[Google Cloud security foundations guide](https://cloud.google.com/architecture/security-foundations). The following table lists the parts of the guide. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
0-bootstrapBootstraps a Google Cloud organization, creating all the required resources -and permissions to start using the Cloud Foundation Toolkit (CFT). This -step also configures a CI/CD Pipeline for foundations code in subsequent -stages.
1-orgSets up top-level shared folders, monitoring and networking projects, -organization-level logging, and baseline security settings through -organizational policies.
2-environmentsSets up development, non-production, and production environments within the -Google Cloud organization that you've created.
3-networks-dual-svpcSets up base and restricted shared VPCs with default DNS, NAT (optional), -Private Service networking, VPC service controls, on-premises Dedicated -Interconnect, and baseline firewall rules for each environment. It also sets -up the global DNS hub.
3-networks-hub-and-spokeSets up base and restricted shared VPCs with all the default configuration -found on step 3-networks-dual-svpc, but here the architecture will be based on the -Hub and Spoke network model. It also sets up the global DNS hub
4-projectsSets up a folder structure, projects, and an application infrastructure pipeline for applications, - which are connected as service projects to the shared VPC created in the previous stage.
5-app-infra 5-vpc-sc(this file)A project folder structure which expands upon all projects created in 4-projects
- -For an overview of the architecture and the parts, see the -[terraform-google-enterprise-genai README](https://github.com/terraform-google-modules/terraform-google-enterprise-genai) -file. - -## VPC-SC - -By now, `artifact-publish` and `service-catalog` have been deployed. The projects inflated under `6-machine-learning` are set in a service perimiter for added security. As such, several services and accounts must be given ingress and egress policies before `6-machine-learning` has been deployed. - -cd into gcp-networks - - ```bash - cd ../gcp-networks - ``` - -Below, you can find the values that will need to be applied to `common.auto.tfvars` and your `development.auto.tfvars`, `non-production.auto.tfvars` & `production.auto.tfvars`. - -In `common.auto.tfvars` update your `perimeter_additional_members` to include: -``` -"serviceAccount:sa-tf-cb-bu3-machine-learning@[prj-c-bu3infra-pipeline-project-id].iam.gserviceaccount.com" -"serviceAccount:sa-terraform-env@[prj-b-seed-project-id].iam.gserviceaccount.com" -"serviceAccount:service-[prj-d-logging-project-number]@gs-project-accounts.iam.gserviceaccount.com" -"serviceAccount:[prj-d-machine-learning-project-number]@cloudbuild.gserviceaccount.com" -``` - - - In each respective environment folders, update your `development.auto.tfvars`, `non-production.auto.tfvars` & `production.auto.tfvars` to include these changes under `ingress_policies` - -You can find the `sources.access_level` information by going to `Security` in your GCP Organization. -Once there, select the perimeter that is associated with the environment (eg. `development`). 
Copy the string under Perimeter Name and place it under `YOUR_ACCESS_LEVEL` - - -## Ingress Policies - - ``` - ingress_policies = [ - // users - { - "from" = { - "identity_type" = "ANY_IDENTITY" - "sources" = { - "access_level" = "[YOUR_ACCESS_LEVEL]" - } - }, - "to" = { - "resources" = [ - "projects/[prj-[your-environment-shared-restricted-project-number]", - "projects/[prj-[your-environment-kms-project-number]", - "projects/[prj-[your-environment-bu3machine-learning-number]", - ] - "operations" = { - "compute.googleapis.com" = { - "methods" = ["*"] - } - "dns.googleapis.com" = { - "methods" = ["*"] - } - "logging.googleapis.com" = { - "methods" = ["*"] - } - "storage.googleapis.com" = { - "methods" = ["*"] - } - "cloudkms.googleapis.com" = { - "methods" = ["*"] - } - "iam.googleapis.com" = { - "methods" = ["*"] - } - "cloudresourcemanager.googleapis.com" = { - "methods" = ["*"] - } - "pubsub.googleapis.com" = { - "methods" = ["*"] - } - "secretmanager.googleapis.com" = { - "methods" = ["*"] - } - "aiplatform.googleapis.com" = { - "methods" = ["*"] - } - "composer.googleapis.com" = { - "methods" = ["*"] - } - "cloudbuild.googleapis.com" = { - "methods" = ["*"] - } - "bigquery.googleapis.com" = { - "methods" = ["*"] - } - } - } - }, - ] - ``` - -## Egress Policies - -For your DEVELOPMENT.AUTO.TFVARS file, also include this as an egress policy: - -```bash -egress_policies = [ - // notebooks - { - "from" = { - "identity_type" = "" - "identities" = [ - "serviceAccount:service-[prj-d-bu3machine-learning-project-number]@gcp-sa-notebooks.iam.gserviceaccount.com", - "serviceAccount:service-[prj-d-bu3machine-learning-project-number]@compute-system.iam.gserviceaccount.com", - ] - }, - "to" = { - "resources" = ["projects/[prj-d-kms-project-number]"] - "operations" = { - "compute.googleapis.com" = { - "methods" = ["*"] - } - "cloudkms.googleapis.com" = { - "methods" = ["*"] - } - } - } - }, -] -``` diff --git a/5-app-infra/6-machine-learning/README.md 
b/5-app-infra/6-machine-learning/README.md deleted file mode 100644 index e10dd1e9..00000000 --- a/5-app-infra/6-machine-learning/README.md +++ /dev/null @@ -1,407 +0,0 @@ -# 5-app-infra - -This repo is part of a multi-part guide that shows how to configure and deploy -the example.com reference architecture described in -[Google Cloud security foundations guide](https://cloud.google.com/architecture/security-foundations). The following table lists the parts of the guide. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
0-bootstrapBootstraps a Google Cloud organization, creating all the required resources -and permissions to start using the Cloud Foundation Toolkit (CFT). This -step also configures a CI/CD Pipeline for foundations code in subsequent -stages.
1-orgSets up top-level shared folders, monitoring and networking projects, -organization-level logging, and baseline security settings through -organizational policies.
2-environmentsSets up development, non-production, and production environments within the -Google Cloud organization that you've created.
3-networks-dual-svpcSets up base and restricted shared VPCs with default DNS, NAT (optional), -Private Service networking, VPC service controls, on-premises Dedicated -Interconnect, and baseline firewall rules for each environment. It also sets -up the global DNS hub.
3-networks-hub-and-spokeSets up base and restricted shared VPCs with all the default configuration -found on step 3-networks-dual-svpc, but here the architecture will be based on the -Hub and Spoke network model. It also sets up the global DNS hub
4-projectsSets up a folder structure, projects, and an application infrastructure pipeline for applications, - which are connected as service projects to the shared VPC created in the previous stage.
5-app-infra 6-machine-learning(this file)Deploys modules based on the modules created in 3-service-catalog
- -For an overview of the architecture and the parts, see the -[terraform-google-enterprise-genai README](https://github.com/terraform-google-modules/terraform-google-enterprise-genai) -file. - -## Purpose - - - -## Prerequisites - -1. 0-bootstrap executed successfully. -1. 1-org executed successfully. -1. 2-environments executed successfully. -1. 3-networks executed successfully. -1. 4-projects executed successfully. - -### Troubleshooting - -Please refer to [troubleshooting](../docs/TROUBLESHOOTING.md) if you run into issues during this step. - -## Usage - -**Note:** If you are using MacOS, replace `cp -RT` with `cp -R` in the relevant -commands. The `-T` flag is needed for Linux, but causes problems for MacOS. - -You will need a github repository set up for this step. This repository houses the DAG's for composer. As of this writing, the structure is as follows: - ``` - . - ├── README.md - └── dags - ├── hello_world.py - └── strings.py - ``` -Add in your dags in the `dags` folder. Any changes to this folder will trigger a pipeline and place the dags in the appropriate composer environment depending on which branch it is pushed to (`development`, `non-production`, `production`) - -Have a github token for access to your repository ready, along with an [Application Installation Id](https://cloud.google.com/build/docs/automating-builds/github/connect-repo-github#connecting_a_github_host_programmatically) and the remote uri to your repository. - -These environmental project inflations are closely tied to the `service-catalog` project that have already deployed. By now, the `bu3-service-catalog` should have been inflated. `service-catalog` contains modules that are being deployed in an interactive (development) environment. Since they already exist; they can be used as terraform modules for operational (non-production, production) environments. This was done in order to avoid code redundancy. One area for all `machine-learning` deployments. 
- -Under `modules/base_env/main.tf` you will notice all module calls are using `git` links as sources. These links refer to the `service-catalog` cloud source repository we have already set up. - -Step 12 in "Deploying with Cloud Build" highlights the necessary steps needed to point the module resources to the correct location. - -### Deploying with Cloud Build - -1. Clone the `gcp-policies` repo based on the Terraform output from the `0-bootstrap` step. -Clone the repo at the same level of the `terraform-google-enterprise-genai` folder, the following instructions assume this layout. -Run `terraform output cloudbuild_project_id` in the `0-bootstrap` folder to get the Cloud Build Project ID. - - ```bash - export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="gcp-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${INFRA_PIPELINE_PROJECT_ID} - - gcloud source repos clone gcp-policies gcp-policies-app-infra --project=${INFRA_PIPELINE_PROJECT_ID} - ``` - - **Note:** `gcp-policies` repo has the same name as the repo created in step `1-org`. In order to prevent a collision, the previous command will clone this repo in the folder `gcp-policies-app-infra`. - -1. Navigate into the repo and copy contents of policy-library to new repo. All subsequent steps assume you are running them - from the gcp-policies-app-infra directory. If you run them from another directory, - adjust your copy paths accordingly. - - ```bash - cd gcp-policies-app-infra - git checkout -b main - - cp -RT ../terraform-google-enterprise-genai/policy-library/ . - ``` - -1. Commit changes and push your main branch to the new repo. - - ```bash - git add . - git commit -m 'Initialize policy library repo' - - git push --set-upstream origin main - ``` - -1. Navigate out of the repo. - - ```bash - cd .. - ``` - -1. Clone the `bu3-machine-learning` repo. - - ```bash - gcloud source repos clone bu3-machine-learning --project=${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. 
Navigate into the repo, change to non-main branch and copy contents of foundation to new repo. - All subsequent steps assume you are running them from the bu3-machine-learning directory. - If you run them from another directory, adjust your copy paths accordingly. - - ```bash - cd bu3-machine-learning - git checkout -b plan - - cp -RT ../terraform-google-enterprise-genai/5-app-infra/6-machine-learning/ . - cp ../terraform-google-enterprise-genai/build/cloudbuild-tf-* . - cp ../terraform-google-enterprise-genai/build/tf-wrapper.sh . - chmod 755 ./tf-wrapper.sh - ``` - -1. Rename `common.auto.example.tfvars` to `common.auto.tfvars`. - - ```bash - mv common.auto.example.tfvars common.auto.tfvars - ``` - -1. Update the `common.auto.tfvars` file with your github app installation id, along with the url of your repository. - ```bash - GITHUB_APP_ID="YOUR-GITHUB-APP-ID-HERE" - GITHUB_REMOTE_URI="YOUR-GITHUB-REMOTE-URI" - - sed -i "s/GITHUB_APP_ID/${GITHUB_APP_ID}/" ./common.auto.tfvars - sed -i "s/GITHUB_REMOTE_URI/${GITHUB_REMOTE_URI}/" ./common.auto.tfvars - ``` - -1. Use `terraform output` to get the project backend bucket value from 0-bootstrap. - - ```bash - export remote_state_bucket=$(terraform -chdir="../terraform-google-enterprise-genai/0-bootstrap/" output -raw projects_gcs_bucket_tfstate) - echo "remote_state_bucket = ${remote_state_bucket}" - sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars - ``` - -1. Use `terraform output` to retrieve the Service Catalog project-id from the projects step and update values in `module/base_env`. - - ```bash - export service_catalog_project_id=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -raw service_catalog_project_id) - echo "service_catalog_project_id = ${service_catalog_project_id}" - - ## Linux - sed -i "s/SERVICE_CATALOG_PROJECT_ID/${service_catalog_project_id}/g" ./modules/base_env/main.tf - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. 
- - ```bash - export backend_bucket=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-machine-learning"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - ## Linux - for i in `find . -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - - ## MacOS - for i in `find . -name 'backend.tf'`; do sed -i "" "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` -1. Update `modules/base_env/main.tf` with the name of service catalog project id to complete the git fqdn for module sources: - ```bash - export service_catalog_project_id=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -raw service_catalog_project_id) - - ##LINUX - sed -i "s/SERVICE-CATALOG-PROJECT-ID/${service_catalog_project_id}/" ./modules/base_env/main.tf - - ##MacOS - sed -i "" "s/SERVICE-CATALOG-PROJECT-ID/${service_catalog_project_id}/" ./modules/base_env/main.tf - ``` -1. Commit changes. - - ```bash - git add . - git commit -m 'Initialize repo' - ``` -1. Composer will rely on DAG's from a github repository. In `4-projects`, a secret 'github-api-token' was created to house your github's api access key. We need to create a new version for this secret which will be used in the composer module which is called in the `base_env` folder. Use the script below to add the secrets into each machine learnings respective environment: - ```bash - envs=(development non-production production) - project_ids=() - github_token = "YOUR-GITHUB-TOKEN" - - for env in "${envs[@]}"; do - output=$(terraform -chdir="../gcp-projects/business_unit_3/${env}" output -raw machine_learning_project_id) - project_ids+=("$output") - done - - for project in "${project_ids[@]}"; do - echo -n $github_token | gcloud secrets versions add github-api-token --data-file=- --project=${project} - done - ``` -1. Push your plan branch to trigger a plan for all environments. 
Because the - _plan_ branch is not a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), pushing your _plan_ - branch triggers _terraform plan_ but not _terraform apply_. Review the plan output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ```bash - git push --set-upstream origin plan - ``` - -1. Merge changes to development. Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), - pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ``` - git checkout -b development - git push origin development - ``` - -1. Merge changes to non-production. Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), - pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ```bash - git checkout -b non-production - git push origin non-production - ``` - -1. Merge changes to production branch. Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), - pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ```bash - git checkout -b production - git push origin production - ``` - -1. cd out of this directory before continuting over to `7-machine-learning-post-deployment` - - ```bash - cd .. - ``` -## Running Terraform locally - -1. 
The next instructions assume that you are at the same level of the `terraform-google-enterprise-genai` folder. Change into `5-app-infra` folder, copy the Terraform wrapper script and ensure it can be executed. - - ```bash - cd terraform-google-enterprise-genai/5-app-infra/projects/machine-learning - cp ../../../build/tf-wrapper.sh . - chmod 755 ./tf-wrapper.sh - ``` - -1. Rename `common.auto.example.tfvars` files to `common.auto.tfvars`. - - ```bash - mv common.auto.example.tfvars common.auto.tfvars - ``` - -1. Update `common.auto.tfvars` file with values from your environment. -1. Use `terraform output` to get the project backend bucket value from 0-bootstrap. - - ```bash - export remote_state_bucket=$(terraform -chdir="../0-bootstrap/" output -raw projects_gcs_bucket_tfstate) - echo "remote_state_bucket = ${remote_state_bucket}" - sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars - ``` - -1. Provide the user that will be running `./tf-wrapper.sh` the Service Account Token Creator role to the bu3 Terraform service account. -1. Provide the user permissions to run the terraform locally with the `serviceAccountTokenCreator` permission. - - ```bash - member="user:$(gcloud auth list --filter="status=ACTIVE" --format="value(account)")" - echo ${member} - - project_id=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${project_id} - - terraform_sa=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-machine-learning"' --raw-output) - echo ${terraform_sa} - - gcloud iam service-accounts add-iam-policy-binding ${terraform_sa} --project ${project_id} --member="${member}" --role="roles/iam.serviceAccountTokenCreator" - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. 
- - ```bash - export backend_bucket=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-machine-learning"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` - -1. Update `modules/base_env/main.tf` with Service Catalog Project Id. - - ```bash - export service_catalog_project_id=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -raw service_catalog_project_id) - echo "service_catalog_project_id = ${service_catalog_project_id}" - - ## Linux - sed -i "s/SERVICE_CATALOG_PROJECT_ID/${service_catalog_project_id}/g" ./modules/base_env/main.tf - ``` - -We will now deploy each of our environments (development/production/non-production) using this script. -When using Cloud Build or Jenkins as your CI/CD tool, each environment corresponds to a branch in the repository for the `5-app-infra` step. Only the corresponding environment is applied. - -To use the `validate` option of the `tf-wrapper.sh` script, please follow the [instructions](https://cloud.google.com/docs/terraform/policy-validation/validate-policies#install) to install the terraform-tools component. - -1. Use `terraform output` to get the Infra Pipeline Project ID from 4-projects output. - - ```bash - export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${INFRA_PIPELINE_PROJECT_ID} - - export GOOGLE_IMPERSONATE_SERVICE_ACCOUNT=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-machine-learning"' --raw-output) - echo ${GOOGLE_IMPERSONATE_SERVICE_ACCOUNT} - ``` - -1. Run `init` and `plan` and review output for environment production. - - ```bash - ./tf-wrapper.sh init production - ./tf-wrapper.sh plan production - ``` - -1. Run `validate` and check for violations. 
- - ```bash - ./tf-wrapper.sh validate production $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. Run `apply` production. - - ```bash - ./tf-wrapper.sh apply production - ``` - -1. Run `init` and `plan` and review output for environment non-production. - - ```bash - ./tf-wrapper.sh init non-production - ./tf-wrapper.sh plan non-production - ``` - -1. Run `validate` and check for violations. - - ```bash - ./tf-wrapper.sh validate non-production $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. Run `apply` non-production. - - ```bash - ./tf-wrapper.sh apply non-production - ``` - -1. Run `init` and `plan` and review output for environment development. - - ```bash - ./tf-wrapper.sh init development - ./tf-wrapper.sh plan development - ``` - -1. Run `validate` and check for violations. - - ```bash - ./tf-wrapper.sh validate development $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. Run `apply` development. - - ```bash - ./tf-wrapper.sh apply development - ``` - -If you received any errors or made any changes to the Terraform config or `common.auto.tfvars` you must re-run `./tf-wrapper.sh plan ` before running `./tf-wrapper.sh apply `. - -After executing this stage, unset the `GOOGLE_IMPERSONATE_SERVICE_ACCOUNT` environment variable. 
- -```bash -unset GOOGLE_IMPERSONATE_SERVICE_ACCOUNT -``` diff --git a/5-app-infra/6-machine-learning/business_unit_3/non-production/common.auto.tfvars b/5-app-infra/6-machine-learning/business_unit_3/non-production/common.auto.tfvars deleted file mode 120000 index 39aaa462..00000000 --- a/5-app-infra/6-machine-learning/business_unit_3/non-production/common.auto.tfvars +++ /dev/null @@ -1 +0,0 @@ -../../common.auto.tfvars \ No newline at end of file diff --git a/5-app-infra/6-machine-learning/business_unit_3/non-production/versions.tf b/5-app-infra/6-machine-learning/business_unit_3/non-production/versions.tf deleted file mode 100644 index baa38abb..00000000 --- a/5-app-infra/6-machine-learning/business_unit_3/non-production/versions.tf +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Copyright 2022 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -terraform { - required_version = ">= 0.13" - - required_providers { - - google = { - source = "hashicorp/google" - version = ">= 3.77, < 6" - } - - google-beta = { - source = "hashicorp/google-beta" - version = ">= 3.77, < 6" - } - - null = { - source = "hashicorp/null" - version = "~> 3.0" - } - - random = { - source = "hashicorp/random" - version = "~> 3.1" - } - - } -} diff --git a/5-app-infra/6-machine-learning/business_unit_3/production/common.auto.tfvars b/5-app-infra/6-machine-learning/business_unit_3/production/common.auto.tfvars deleted file mode 120000 index 39aaa462..00000000 --- a/5-app-infra/6-machine-learning/business_unit_3/production/common.auto.tfvars +++ /dev/null @@ -1 +0,0 @@ -../../common.auto.tfvars \ No newline at end of file diff --git a/5-app-infra/6-machine-learning/business_unit_3/production/versions.tf b/5-app-infra/6-machine-learning/business_unit_3/production/versions.tf deleted file mode 100644 index baa38abb..00000000 --- a/5-app-infra/6-machine-learning/business_unit_3/production/versions.tf +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Copyright 2022 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -terraform { - required_version = ">= 0.13" - - required_providers { - - google = { - source = "hashicorp/google" - version = ">= 3.77, < 6" - } - - google-beta = { - source = "hashicorp/google-beta" - version = ">= 3.77, < 6" - } - - null = { - source = "hashicorp/null" - version = "~> 3.0" - } - - random = { - source = "hashicorp/random" - version = "~> 3.1" - } - - } -} diff --git a/5-app-infra/7-machine-learning-post-deployment/README.md b/5-app-infra/7-machine-learning-post-deployment/README.md deleted file mode 100644 index 24267c55..00000000 --- a/5-app-infra/7-machine-learning-post-deployment/README.md +++ /dev/null @@ -1,252 +0,0 @@ -## Post Deployment - -### Big Query - In order to avoid having to specify a kms key for every query against a bigquery resource, we set the default project encryption key to the corresponding environment key in advance - ```bash - ml_project_dev=$(terraform -chdir="gcp-projects/business_unit_3/development" output -json) - ml_project_nonprd=$(terraform -chdir="gcp-projects/business_unit_3/non-production" output -json) - ml_project_prd=$(terraform -chdir="gcp-projects/business_unit_3/production" output -json) - - projects=( "$ml_project_dev" "$ml_project_nonprd" "$ml_project_prd" ) - - for project in "${projects[@]}"; do - project_id=$(echo "$project" | jq -r '.machine_learning_project_id.value') - project_key=$(echo "$project "| jq -r '.machine_learning_kms_keys.value."us-central1".id') - echo "ALTER PROJECT \`$project_id\` SET OPTIONS (\`region-us-central1.default_kms_key_name\`=\"$project_key\");" | bq query --project_id "$project_id" --nouse_legacy_sql - done - ``` - -### VPC-SC - -1. 
Now that machine learning's projects have all been inflated, please _return to gcp-projects_ and update COMMON.AUTO.TFVARS with this __additional__ information under `perimeter_additional_members`: - ``` - "serviceAccount:service-[prj-n-bu3machine-learning-number]@dataflow-service-producer-prod.iam.gserviceaccount.com", - "serviceAccount:[prj-n-bu3machine-learning-number]@cloudbuild.gserviceaccount.com", - "serviceAccount:[prj-n-bu3machine-learning-number]-compute@developer.gserviceaccount.com", - "serviceAccount:[prj-p-bu3machine-learning-number]@cloudbuild.gserviceaccount.com", - "serviceAccount:service-[prj-p-bu3machine-learning-number]@gcp-sa-aiplatform.iam.gserviceaccount.com", - ``` -2. optional - run the below command to generate a list of the above changes needed to COMMON.AUTO.TFVARS - ```bash - ml_n=$(terraform -chdir="gcp-projects/business_unit_3/non-production" output -raw machine_learning_project_number) - ml_p=$(terraform -chdir="gcp-projects/business_unit_3/production" output -raw machine_learning_project_number) - - echo "serviceAccount:service-${ml_n}@dataflow-service-producer-prod.iam.gserviceaccount.com", - echo "serviceAccount:${ml_n}@cloudbuild.gserviceaccount.com", - echo "serviceAccount:${ml_n}-compute@developer.gserviceaccount.com", - echo "serviceAccount:${ml_p}@cloudbuild.gserviceaccount.com", - echo "serviceAccount:service-${ml_p}@gcp-sa-aiplatform.iam.gserviceaccount.com", - ``` - -1. Many of the necessary service agents and permissions were deployed in all project environments for machine-learning. Additional entries will be needed for each environment. - -1. Add in more agents to the DEVELOPMENT.AUTO.TFVARS file under `egress_policies`. -Notably: - - * "serviceAccount:bq-[prj-d-bu3machine-learning-project-number]@bigquery-encryption.iam.gserviceaccount.com" - - This should be added under identities. 
It should look like this:: - ``` - egress_policies = [ - // notebooks - { - "from" = { - "identity_type" = "" - "identities" = [ - "serviceAccount:bq-[prj-d-bu3machine-learning-project-number]@bigquery-encryption.iam.gserviceaccount.com" << New Addition - "serviceAccount:service-[prj-d-bu3machine-learning-project-number]@gcp-sa-notebooks.iam.gserviceaccount.com", - "serviceAccount:service-[prj-d-bu3machine-learning-project-number]@compute-system.iam.gserviceaccount.com", - ] - }, - "to" = { - "resources" = ["projects/[prj-d-kms-project-number]"] - "operations" = { - "compute.googleapis.com" = { - "methods" = ["*"] - } - "cloudkms.googleapis.com" = { - "methods" = ["*"] - } - } - } - }, - ] - ``` -1. Remain in DEVELOPMENT.AUTO.TFVARS and include this entry under `egress_policies`. Ensure you replace all [project numbers] with their corresponding project: - ``` - // artifact Registry - { - "from" = { - "identity_type" = "" - "identities" = [ - "serviceAccount:service-[prj-d-bu3machine-learning-number]@gcp-sa-aiplatform-cc.iam.gserviceaccount.com", - ] - }, - "to" = { - "resources" = ["projects/[prj-c-bu3artifacts-number]"] - "operations" = { - "artifactregistry.googleapis.com" = { - "methods" = ["*"] - } - } - } - }, - // Dataflow - { - "from" = { - "identity_type" = "" - "identities" = [ - "serviceAccount:service-[prj-n-bu3machine-learning-number]@dataflow-service-producer-prod.iam.gserviceaccount.com", - ] - }, - "to" = { - "resources" = ["projects/[prj-n-bu3machine-learning-number]"] - "operations" = { - "compute.googleapis.com" = { - "methods" = ["*"] - } - } - } - }, - ``` - -1. 
Under NON-PRODUCTION.AUTO.TFVARS, add these entries under `egress_policies`: - ``` - { - "from" = { - "identity_type" = "ANY_IDENTITY" - "identities" = [] - }, - "to" = { - "resources" = [ - "projects/[prj-c-bu3artifacts-number]" - ] - "operations" = { - "artifactregistry.googleapis.com" = { - "methods" = ["*"] - } - } - } - }, - // artifact Registry - { - "from" = { - "identity_type" = "" - "identities" = [ - "serviceAccount:service-[prj-n-bu3machine-learning-number]@gcp-sa-aiplatform-cc.iam.gserviceaccount.com", - ] - }, - "to" = { - "resources" = ["projects/[prj-c-bu3artifacts-number]"] - "operations" = { - "artifactregistry.googleapis.com" = { - "methods" = ["*"] - } - } - } - }, - // DataFlow - { - "from" = { - "identity_type" = "" - "identities" = [ - "serviceAccount:service-[prj-n-bu3machine-learning-number]@dataflow-service-producer-prod.iam.gserviceaccount.com", - ] - }, - "to" = { - "resources" = ["projects/[prj-d-shared-restricted-number]"] - "operations" = { - "compute.googleapis.com" = { - "methods" = ["*"] - } - } - } - }, - { - "from" = { - "identity_type" = "" - "identities" = [ - "serviceAccount:[prj-n-bu3machine-learning-number]-compute@developer.gserviceaccount.com", - "serviceAccount:service-[prj-d-bu3machine-learning-number]@gcp-sa-aiplatform.iam.gserviceaccount.com", - ] - }, - "to" = { - "resources" = ["projects/[prj-p-bu3machine-learning-number]"] - "operations" = { - "aiplatform.googleapis.com" = { - "methods" = ["*"] - }, - "storage.googleapis.com" = { - "methods" = ["*"] - }, - "bigquery.googleapis.com" = { - "methods" = ["*"] - } - } - } - }, - ``` - -1. 
Under PRODUCTION.AUTO.TFVARS, add these entries under `egress_policies`: - ``` - { - "from" = { - "identity_type" = "" - "identities" = [ - "serviceAccount:service-[prj-p-bu3machine-learning-number]@gcp-sa-aiplatform.iam.gserviceaccount.com", - "serviceAccount:service-[prj-p-bu3machine-learning-number]@gcp-sa-aiplatform-cc.iam.gserviceaccount.com", - "serviceAccount:cloud-cicd-artifact-registry-copier@system.gserviceaccount.com", - ] - }, - "to" = { - "resources" = [ - "projects/[prj-n-bu3machine-learning-number]", - "projects/[prj-c-bu3artifacts-number]", - ] - "operations" = { - "artifactregistry.googleapis.com" = { - "methods" = ["*"] - }, - "storage.googleapis.com" = { - "methods" = ["*"] - }, - "bigquery.googleapis.com" = { - "methods" = ["*"] - } - } - } - }, - ``` - -## SERVICE CATALOG -Once you have set up service catalog and attempt to deploy out terraform code, there is a high chance you will encounter this error: -`Permission denied; please check you have the correct IAM permissions and APIs enabled.` -This is due to a VPC Service control error that until now, is impossible to add into the egress policy. Go to `prj-d-bu3machine-learning` project and view the logs, filtering for ERRORS. There will be a VPC Service Controls entry that has an `egressViolation`. It should look something like the following: -``` -egressViolations: [ - 0: { - servicePerimeter: "accessPolicies/1066661933618/servicePerimeters/sp_d_shared_restricted_default_perimeter_f3fv" - source: "projects/[machine-learning-project-number]" - sourceType: "Resource" - targetResource: "projects/[unknown-project-number]" - } -] -``` -we want the `unknown-project-number` here. 
Add this into your `egress_policies` in `3-networks` under DEVELOPMENT.AUTO.TFVARS, NON-PRODUCTION.AUTO.TFVARS & PRODUCTION.AUTO.TFVARS -``` -// Service Catalog - { - "from" = { - "identity_type" = "ANY_IDENTITY" - "identities" = [] - }, - "to" = { - "resources" = ["projects/[unknown-project-number]"] - "operations" = { - "cloudbuild.googleapis.com" = { - "methods" = ["*"] - } - } - } - }, -``` diff --git a/5-app-infra/README.md b/5-app-infra/README.md index b41e5da6..517bc177 100644 --- a/5-app-infra/README.md +++ b/5-app-infra/README.md @@ -32,12 +32,6 @@ Interconnect, and baseline firewall rules for each environment. It also sets up the global DNS hub. -3-networks-hub-and-spoke -Sets up base and restricted shared VPCs with all the default configuration -found on step 3-networks-dual-svpc, but here the architecture will be based on the -Hub and Spoke network model. It also sets up the global DNS hub - - 4-projects Sets up a folder structure, projects, and an application infrastructure pipeline for applications, which are connected as service projects to the shared VPC created in the previous stage. @@ -50,64 +44,671 @@ Hub and Spoke network model. It also sets up the global DNS hub For an overview of the architecture and the parts, see the -[terraform-google-enterprise-genai README](https://github.com/terraform-google-modules/terraform-google-enterprise-genai) +[terraform-google-enterprise-genai README](https://github.com/GoogleCloudPlatform/terraform-google-enterprise-genai) file. ## Purpose -Folders `1-artifact-publish`, `3-serive-catalog` and `5-machine-learning` are projects that will be _expanded_ upon. In step 4, we have initiated the creation of these projects, enabled API's and assigned roles to various service accounts, service agents and cryptography keys that are needed for each respective project to operate successfully. 
Folders `2-artifact-publish-repo` and `4-service-catalog-repo` are seperate cloud build repositories that have their own unique piplelines configured. These are used for building out in-house Docker images for your machine-learning pipelines and terraform modules that will be used in `notebooks` in your interactive (development) environment, as well as deployment modules for your operational (non-production, production) environments respectively. +Inside the `projects` folder, the `artifact-publish` and `service-catalog` directories contain applications that will be further developed. These directories are Terraform repositories that house the configuration code for their respective applications. For instance, in the `projects/artifact-publish` directory, you will find code that configures the custom pipeline for the artifact-publish application. +> Note: Remember that in step 4-projects, the Service Catalog and Artifacts projects were created under `common` folder. + +Inside the `source_repos` folder, the folders `artifact-publish` and `service-catalog` are separate Cloud Build Repositories that have their own unique pipelines configured. These are used for building out in-house Docker images for your machine-learning pipelines and terraform modules that can be deployed through the Service Catalog Google Cloud Product. + +This repository contains examples using modules in `notebooks` in your interactive (development) environment, as well as deployment modules for your operational (non-production, production) environments respectively. For the purposes of this demonstration, we assume that you are using Cloud Build or manual deployment. -When viewing each folder under `projects`, consider them as seperate repositories which will be used to deploy out each respective project. In the case of using Cloud Build (which is what this example is primarily based on), each folder will be placed in its own GCP cloud source repository for deployment.
There is a README placed in each project folder which will highlight the necessary steps to achieve deployment. - -When deploying/expanding upon each project, you will find your Cloud Build pipelines being executed in `prj-c-bu3infra-pipeline`. - -The order of deployments for the machine-learning's project is as follows: - -* 0-gcp-polcies -* 1-artifact-publish -* 2-artifact-publish-repo -* 3-service-catalog -* 4-service-catalog-repo -* 5-vpc-sc -* 6-machine-learning -* 7-machine-learning-post-deploy - -## VPC-SC - -Be aware that for the purposes of this machine learning project, there are several projects in each respective environment that have been placed within a `service perimeter`. -As such, during your deployment process, you _will_ encounter deployment errors related to VPC-SC violations. Before continuing onto `5-app-infra/projects`, you will need to go _back_ into `3-networks-dual-svpc` and _update_ -your ingress rules. - -Below, you can find the values that will need to be applied to `common.auto.tfvars` and your `development.auto.tfvars`, ###`non-production.auto.tfvars` & `production.auto.tfvars`. 
- -In `common.auto.tfvars` update your `perimeter_additional_members` to include: - * the service acccount for bu3infra-pipeline: `"serviceAccount:sa-tf-cb-bu3-machine-learning@[prj-c-bu3infra-pipeline-project-id].iam.gserviceaccount.com"` - * the service account for your cicd pipeline: `"serviceAccount:sa-terraform-env@[prj-b-seed-project-id].iam.gserviceaccount.com"` - * your development environment logging bucket service account: `"serviceAccount:service-[prj-d-logging-project-number]@gs-project-accounts.iam.gserviceaccount.com"` - * your development environment service acount for cloudbuild: `"serviceAccount:[prj-d-machine-learning-project-number]@cloudbuild.gserviceaccount.com"` - - In each respective environment folders, update your `development.auto.tfvars`, `non-production.auto.tfvars` & `production.auto.tfvars` to include the changes mentioned in Ingress Policies section. - -For your DEVELOPMENT.AUTO.TFVARS file, also include the egress policy mentioned in Egress Policies section. - -Please note that this will cover some but not ALL the policies that will be needed. During deployment there will be violations that will occur which come from unknown google projects outside the scope of your organization. It will be the responsibility of the operator(s) deploying this process to view logs about the errors and make adjustments accordingly. Most notably, this was observed for Service Catalog. There will be an instance where an egress policy to be added for `cloudbuild.googleapis.com` access: - - ``` - // Service Catalog - { - "from" = { - "identity_type" = "ANY_IDENTITY" - "identities" = [] - }, - "to" = { - "resources" = ["projects/[some random google project id]"] - "operations" = { - "cloudbuild.googleapis.com" = { - "methods" = ["*"] - } - } - } - }, - ``` +## Prerequisites + +1. 0-bootstrap executed successfully. +1. 1-org executed successfully. +1. 2-environments executed successfully. +1. 3-networks executed successfully. +1. 
4-projects executed successfully. + +### Troubleshooting + +Please refer to [troubleshooting](../docs/TROUBLESHOOTING.md) if you run into issues during this step. + +## Usage + +**Note:** If you are using MacOS, replace `cp -RT` with `cp -R` in the relevant +commands. The `-T` flag is needed for Linux, but causes problems for MacOS. + +### Deploying with Cloud Build + +1. Ensure you are in a neutral directory outside any other git related repositories. + +1. Clone the `gcp-policies` repo based on the Terraform output from the `4-projects` step. +Clone the repo at the same level of the `terraform-google-enterprise-genai` folder, the following instructions assume this layout. +Run `terraform output cloudbuild_project_id` in the `4-projects` folder to get the Cloud Build Project ID. + + ```bash + export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="gcp-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) + echo ${INFRA_PIPELINE_PROJECT_ID} + + gcloud source repos clone gcp-policies gcp-policies-app-infra --project=${INFRA_PIPELINE_PROJECT_ID} + ``` + + **Note:** `gcp-policies` repo has the same name as the repo created in step `1-org`. In order to prevent a collision, the previous command will clone this repo in the folder `gcp-policies-app-infra`. + +1. Navigate into the repo and copy contents of policy-library to new repo. All subsequent steps assume you are running them + from the gcp-policies-app-infra directory. If you run them from another directory, + adjust your copy paths accordingly. + + ```bash + cd gcp-policies-app-infra/ + git checkout -b main + + cp -RT ../terraform-google-enterprise-genai/policy-library/ . + ``` + +1. Commit changes and push your main branch to the new repo. + + ```bash + git add . + git commit -m 'Initialize policy library repo' + + git push --set-upstream origin main + ``` + +1. Navigate out of the repo. + + ```bash + cd .. 
+ ``` + +#### Artifacts Application + +The purpose of this step is to deploy out an artifact registry to store custom docker images. A Cloud Build pipeline is also deployed out. At the time of this writing, it is configured to attach itself to a Cloud Source Repository. The Cloud Build pipeline is responsible for building out a custom image that may be used in Machine Learning Workflows. If you are in a situation where company policy requires no outside repositories to be accessed, custom images can be used to keep access to any image internally. + +Since every workflow will have access to these images, it is deployed in the `common` folder, and keeping with the foundations structure, is listed as `shared` under this Business Unit. It will only need to be deployed once. + +The Pipeline is connected to a Google Cloud Source Repository with a simple structure: + + ``` + ├── README.md + └── images + ├── tf2-cpu.2-13:0.1 + │   └── Dockerfile + └── tf2-gpu.2-13:0.1 + └── Dockerfile + ``` +For the purposes of this example, the pipeline is configured to monitor the `main` branch of this repository. + +Each folder under `images` has the full name and tag of the image that must be built. Once a change to the `main` branch is pushed, the pipeline will analyse which files have changed and build that image out and place it in the artifact repository. For example, if there is a change to the Dockerfile in the `tf2-cpu.2-13:0.1` folder, or if the folder itself has been renamed, it will build out an image and tag it based on the folder name that the Dockerfile has been housed in. + +Once pushed, the pipeline build logs can be accessed by navigating to the artifacts project name created in step-4: + + ```bash + terraform -chdir="gcp-projects/business_unit_3/shared/" output -raw common_artifacts_project_id + ``` + +1. Clone the `bu3-artifact-publish` repo. + + ```bash + gcloud source repos clone bu3-artifact-publish --project=${INFRA_PIPELINE_PROJECT_ID} + ``` + +1. 
Navigate into the repo, change to non-main branch and copy contents of genAI to new repo. + All subsequent steps assume you are running them from the bu3-artifact-publish directory. + If you run them from another directory, adjust your copy paths accordingly. + + ```bash + cd bu3-artifact-publish/ + git checkout -b plan + + cp -RT ../terraform-google-enterprise-genai/5-app-infra/projects/artifact-publish/ . + cp ../terraform-google-enterprise-genai/build/cloudbuild-tf-* . + cp ../terraform-google-enterprise-genai/build/tf-wrapper.sh . + chmod 755 ./tf-wrapper.sh + ``` + +1. Rename `common.auto.example.tfvars` to `common.auto.tfvars`. + + ```bash + mv common.auto.example.tfvars common.auto.tfvars + ``` + +1. Update the file with values from your environment and 0-bootstrap. See any of the business unit 1 envs folders [README.md](./business_unit_1/production/README.md) files for additional information on the values in the `common.auto.tfvars` file. + + ```bash + export remote_state_bucket=$(terraform -chdir="../terraform-google-enterprise-genai/0-bootstrap/" output -raw projects_gcs_bucket_tfstate) + echo "remote_state_bucket = ${remote_state_bucket}" + sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars + ``` + +1. Update `backend.tf` with your bucket from the infra pipeline output. + + ```bash + export backend_bucket=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-artifact-publish"' --raw-output) + echo "backend_bucket = ${backend_bucket}" + + for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done + ``` + +1. Commit changes. + + ```bash + git add . + git commit -m 'Initialize repo' + ``` + +1. Push your plan branch to trigger a plan for all environments. Because the + _plan_ branch is not a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), pushing your _plan_ + branch triggers _terraform plan_ but not _terraform apply_. 
Review the plan output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID + + ```bash + git push --set-upstream origin plan + ``` + +1. Merge changes to production. Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), + pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID + + ```bash + git checkout -b production + git push origin production + ``` + +1. `cd` out of the `bu3-artifact-publish` repository. + + ```bash + cd .. + ``` + +1. Navigate to the project that was output from `${ARTIFACT_PROJECT_ID}` in Google's Cloud Console to view the first run of images being built. + +#### Configuring Cloud Source Repository of Artifact Application + +1. Grab the Artifact Project ID + + ```bash + export ARTIFACT_PROJECT_ID=$(terraform -chdir="gcp-projects/business_unit_3/shared" output -raw common_artifacts_project_id) + echo ${ARTIFACT_PROJECT_ID} + ``` + +1. Clone the freshly minted Cloud Source Repository that was created for this project. + + ```bash + gcloud source repos clone publish-artifacts --project=${ARTIFACT_PROJECT_ID} + ``` + +1. Enter the repo folder and copy over the artifact files from `5-app-infra/source_repos/artifact-publish` folder. + + ```bash + cd publish-artifacts + git checkout -b main + + git commit -m "Initialize Repository" --allow-empty + cp -RT ../terraform-google-enterprise-genai/5-app-infra/source_repos/artifact-publish/ . + ``` + +1. Commit changes and push your main branch to the new repo. + + ```bash + git add . + git commit -m 'Build Images' + + git push --set-upstream origin main + ``` + +1. `cd` out of the `publish-artifacts` repository. + + ```bash + cd .. 
+ ``` + +#### Service Catalog Pipeline Configuration + +This step has two main purposes: + +1. To deploy a pipeline and a bucket which is linked to a Google Cloud Repository that houses terraform modules for the use in Service Catalog. +Although Service Catalog itself must be manually deployed, the modules which will be used can still be automated. + +2. To deploy infrastructure for operational environments (i.e. `non-production` & `production`). + +The reasoning behind utilizing one repository with two deployment methodologies is due to how close interactive (`development`) and operational environments are. + +The repository has the structure (truncated for brevity): + + ``` + business_unit_3 + ├── development + ├── non-production + ├── production + modules + ├── bucket + │   ├── README.md + │   ├── data.tf + │   ├── main.tf + │   ├── outputs.tf + │   ├── provider.tf + │   └── variables.tf + ├── composer + │   ├── README.md + │   ├── data.tf + │   ├── iam.roles.tf + │   ├── iam.users.tf + │   ├── locals.tf + │   ├── main.tf + │   ├── outputs.tf + │   ├── provider.tf + │   ├── terraform.tfvars.example + │   ├── variables.tf + │   └── vpc.tf + ├── cryptography + │   ├── README.md + │   ├── crypto_key + │   │   ├── main.tf + │   │   ├── outputs.tf + │   │   └── variables.tf + │   └── key_ring + │   ├── main.tf + │   ├── outputs.tf + │   └── variables.tf + ``` + +Each folder under `modules` represents a terraform module. +When there is a change in any of the terraform module folders, the pipeline will find whichever module has been changed since the last push, `tar.gz` that file and place it in a bucket for Service Catalog to access. + +This pipeline is listening to the `main` branch of this repository for changes in order for the modules to be uploaded to service catalog. + +The pipeline also listens for changes made to `plan`, `development`, `non-production` & `production` branches, this is used for deploying infrastructure to each project. + +1. 
Clone the `bu3-service-catalog` repo. + + ```bash + gcloud source repos clone bu3-service-catalog --project=${INFRA_PIPELINE_PROJECT_ID} + ``` + +1. Navigate into the repo, change to non-main branch and copy contents of foundation to new repo. + All subsequent steps assume you are running them from the bu3-service-catalog directory. + If you run them from another directory, adjust your copy paths accordingly. + + ```bash + cd bu3-service-catalog + git checkout -b plan + + cp -RT ../terraform-google-enterprise-genai/5-app-infra/projects/service-catalog/ . + cp ../terraform-google-enterprise-genai/build/cloudbuild-tf-* . + cp ../terraform-google-enterprise-genai/build/tf-wrapper.sh . + chmod 755 ./tf-wrapper.sh + ``` + +1. Rename `common.auto.example.tfvars` to `common.auto.tfvars`. + + ```bash + mv common.auto.example.tfvars common.auto.tfvars + ``` + +1. Update the file with values from your environment and 0-bootstrap. See any of the business unit 1 envs folders [README.md](./business_unit_1/production/README.md) files for additional information on the values in the `common.auto.tfvars` file. + + ```bash + export remote_state_bucket=$(terraform -chdir="../terraform-google-enterprise-genai/0-bootstrap/" output -raw projects_gcs_bucket_tfstate) + echo "remote_state_bucket = ${remote_state_bucket}" + sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars + ``` + +1. Update `backend.tf` with your bucket from the infra pipeline output. + + ```bash + export backend_bucket=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-service-catalog"' --raw-output) + echo "backend_bucket = ${backend_bucket}" + + for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done + ``` + +1. Commit changes. + + ```bash + git add . + git commit -m 'Initialize repo' + ``` + +1. Push your plan branch to trigger a plan for all environments. 
Because the + _plan_ branch is not a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), pushing your _plan_ + branch triggers _terraform plan_ but not _terraform apply_. Review the plan output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID + + ```bash + git push --set-upstream origin plan + ``` + +1. Merge changes to production. Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), + pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID + + ```bash + git checkout -b production + git push origin production + ``` + +1. `cd` out of the `bu3-service-catalog` repository. + + ```bash + cd .. + ``` + +#### Configuring Cloud Source Repository of Service Catalog Solutions Pipeline + +1. Grab the Service Catalogs ID + + ```bash + export SERVICE_CATALOG_PROJECT_ID=$(terraform -chdir="gcp-projects/business_unit_3/shared" output -raw service_catalog_project_id) + echo ${SERVICE_CATALOG_PROJECT_ID} + ``` + +1. Clone the freshly minted Cloud Source Repository that was created for this project. + + ```bash + gcloud source repos clone service-catalog --project=${SERVICE_CATALOG_PROJECT_ID} + ``` + +1. Enter the repo folder and copy over the service catalogs files from `5-app-infra/source_repos/service-catalog` folder. + + ```bash + cd service-catalog/ + git checkout -b main + + cp -RT ../terraform-google-enterprise-genai/5-app-infra/source_repos/service-catalog/ . + git add img + git commit -m "Add img directory" + ``` + +1. Commit changes and push main branch to the new repo. + + ```bash + git add modules + git commit -m 'Initialize Service Catalog Build Repo' + + git push --set-upstream origin main + ``` + +1. `cd` out of the `service-catalog` repository. + + ```bash + cd .. 
+ ``` + +1. Navigate to the project that was output from `${ARTIFACT_PROJECT_ID}` in Google's Cloud Console to view the first run of images being built. + +### Run Terraform locally + +#### Artifacts Application + +1. The next instructions assume that you are at the same level of the `terraform-google-enterprise-genai` folder. Change into `5-app-infra` folder, copy the Terraform wrapper script and ensure it can be executed. + + ```bash + cd terraform-google-enterprise-genai/5-app-infra/projects/artifact-publish/ + cp ../../../build/tf-wrapper.sh . + chmod 755 ./tf-wrapper.sh + ``` + +1. Rename `common.auto.example.tfvars` files to `common.auto.tfvars`. + + ```bash + mv common.auto.example.tfvars common.auto.tfvars + ``` + +1. Update `common.auto.tfvars` file with values from your environment. + +1. Use `terraform output` to get the project backend bucket value from 0-bootstrap. + + ```bash + export remote_state_bucket=$(terraform -chdir="../../../0-bootstrap/" output -raw projects_gcs_bucket_tfstate) + echo "remote_state_bucket = ${remote_state_bucket}" + sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars + ``` + +1. Provide the user that will be running `./tf-wrapper.sh` the Service Account Token Creator role to the bu3 Terraform service account. + +1. Provide the user permissions to run the terraform locally with the `serviceAccountTokenCreator` permission. 
+ + ```bash + member="user:$(gcloud auth list --filter="status=ACTIVE" --format="value(account)")" + echo ${member} + + project_id=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) + echo ${project_id} + + terraform_sa=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-artifact-publish"' --raw-output) + echo ${terraform_sa} + + gcloud iam service-accounts add-iam-policy-binding ${terraform_sa} --project ${project_id} --member="${member}" --role="roles/iam.serviceAccountTokenCreator" + ``` + +1. Update `backend.tf` with your bucket from the infra pipeline output. + + ```bash + export backend_bucket=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-artifact-publish"' --raw-output) + echo "backend_bucket = ${backend_bucket}" + + for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done + ``` + +We will now deploy each of our environments (development/production/non-production) using this script. +When using Cloud Build or Jenkins as your CI/CD tool, each environment corresponds to a branch in the repository for the `5-app-infra` step. Only the corresponding environment is applied. + +To use the `validate` option of the `tf-wrapper.sh` script, please follow the [instructions](https://cloud.google.com/docs/terraform/policy-validation/validate-policies#install) to install the terraform-tools component. + +1. Use `terraform output` to get the Infra Pipeline Project ID from 4-projects output. 
+ + ```bash + export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) + echo ${INFRA_PIPELINE_PROJECT_ID} + + export GOOGLE_IMPERSONATE_SERVICE_ACCOUNT=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-artifact-publish"' --raw-output) + echo ${GOOGLE_IMPERSONATE_SERVICE_ACCOUNT} + ``` + +1. Run `init` and `plan` and review output for environment shared (common). + + ```bash + ./tf-wrapper.sh init shared + ./tf-wrapper.sh plan shared + ``` + +1. Run `validate` and check for violations. + + ```bash + ./tf-wrapper.sh validate shared $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} + ``` + +1. Run `apply` shared. + + ```bash + ./tf-wrapper.sh apply shared + ``` + +If you received any errors or made any changes to the Terraform config or `common.auto.tfvars` you must re-run `./tf-wrapper.sh plan ` before running `./tf-wrapper.sh apply `. + +After executing this stage, unset the `GOOGLE_IMPERSONATE_SERVICE_ACCOUNT` environment variable. + +```bash +unset GOOGLE_IMPERSONATE_SERVICE_ACCOUNT +``` + +1. `cd` out of the `artifact-publish`. + + ```bash + cd + ``` + +1. Navigate to the project that was output from `${ARTIFACT_PROJECT_ID}` in Google's Cloud Console to view the first run of images being built. + +#### Configuring Cloud Source Repository of Artifact Application + +1. The next instructions assume that you are at the same level of the `terraform-google-enterprise-genai` folder. + +1. Grab the Artifact Project ID + + ```bash + export ARTIFACT_PROJECT_ID=$(terraform -chdir="terraform-google-enterprise-genai/4-projects/business_unit_3/shared" output -raw common_artifacts_project_id) + echo ${ARTIFACT_PROJECT_ID} + ``` + +1. Clone the freshly minted Cloud Source Repository that was created for this project. + + ```bash + gcloud source repos clone publish-artifacts --project=${ARTIFACT_PROJECT_ID} + ``` + +1. 
Enter the repo folder and copy over the artifact files from `5-app-infra/source_repos/artifact-publish` folder. + + ```bash + cd publish-artifacts + git checkout -b main + + git commit -m "Initialize Repository" --allow-empty + cp -RT ../terraform-google-enterprise-genai/5-app-infra/source_repos/artifact-publish/ . + ``` + +1. Commit changes and push your main branch to the new repo. + + ```bash + git add . + git commit -m 'Build Images' + + git push --set-upstream origin main + ``` + +1. `cd` out of the `publish-artifacts` repository. + + ```bash + cd .. + ``` + +#### Service Catalog Configuration + +1. The next instructions assume that you are at the same level of the `terraform-google-enterprise-genai` folder. Change into `5-app-infra` folder, copy the Terraform wrapper script and ensure it can be executed. + + ```bash + cd terraform-google-enterprise-genai/5-app-infra/projects/service-catalog/ + cp ../../../build/tf-wrapper.sh . + chmod 755 ./tf-wrapper.sh + ``` + +1. Rename `common.auto.example.tfvars` files to `common.auto.tfvars`. + + ```bash + mv common.auto.example.tfvars common.auto.tfvars + ``` + +1. Update `common.auto.tfvars` file with values from your environment. + +1. Use `terraform output` to get the project backend bucket value from 0-bootstrap. + + ```bash + export remote_state_bucket=$(terraform -chdir="../../../0-bootstrap/" output -raw projects_gcs_bucket_tfstate) + echo "remote_state_bucket = ${remote_state_bucket}" + sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars + ``` + +1. Provide the user that will be running `./tf-wrapper.sh` the Service Account Token Creator role to the bu3 Terraform service account. + +1. Provide the user permissions to run the terraform locally with the `serviceAccountTokenCreator` permission. 
+ + ```bash + member="user:$(gcloud auth list --filter="status=ACTIVE" --format="value(account)")" + echo ${member} + + project_id=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) + echo ${project_id} + + terraform_sa=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-service-catalog"' --raw-output) + echo ${terraform_sa} + + gcloud iam service-accounts add-iam-policy-binding ${terraform_sa} --project ${project_id} --member="${member}" --role="roles/iam.serviceAccountTokenCreator" + ``` + +1. Update `backend.tf` with your bucket from the infra pipeline output. + + ```bash + export backend_bucket=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-service-catalog"' --raw-output) + echo "backend_bucket = ${backend_bucket}" + + for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done + ``` + +We will now deploy each of our environments (development/production/non-production) using this script. +When using Cloud Build or Jenkins as your CI/CD tool, each environment corresponds to a branch in the repository for the `5-app-infra` step. Only the corresponding environment is applied. + +To use the `validate` option of the `tf-wrapper.sh` script, please follow the [instructions](https://cloud.google.com/docs/terraform/policy-validation/validate-policies#install) to install the terraform-tools component. + +1. Use `terraform output` to get the Infra Pipeline Project ID from 4-projects output. 
+ + ```bash + export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) + echo ${INFRA_PIPELINE_PROJECT_ID} + + export GOOGLE_IMPERSONATE_SERVICE_ACCOUNT=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-service-catalog"' --raw-output) + echo ${GOOGLE_IMPERSONATE_SERVICE_ACCOUNT} + ``` + +1. Run `init` and `plan` and review output for environment shared (common). + + ```bash + ./tf-wrapper.sh init shared + ./tf-wrapper.sh plan shared + ``` + +1. Run `validate` and check for violations. + + ```bash + ./tf-wrapper.sh validate shared $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} + ``` + +1. Run `apply` shared. + + ```bash + ./tf-wrapper.sh apply shared + ``` + +If you received any errors or made any changes to the Terraform config or `common.auto.tfvars` you must re-run `./tf-wrapper.sh plan ` before running `./tf-wrapper.sh apply `. + +After executing this stage, unset the `GOOGLE_IMPERSONATE_SERVICE_ACCOUNT` environment variable. + + ```bash + unset GOOGLE_IMPERSONATE_SERVICE_ACCOUNT + ``` + +#### Configuring Cloud Source Repository of Service Catalog Solutions Pipeline + +1. The next instructions assume that you are at the same level of the `terraform-google-enterprise-genai` folder + +1. Grab the Service Catalogs ID + + ```bash + export SERVICE_CATALOG_PROJECT_ID=$(terraform -chdir="terraform-google-enterprise-genai/4-projects/business_unit_3/shared" output -raw service_catalog_project_id) + echo ${SERVICE_CATALOG_PROJECT_ID} + ``` + +1. Clone the freshly minted Cloud Source Repository that was created for this project. + + ```bash + gcloud source repos clone service-catalog --project=${SERVICE_CATALOG_PROJECT_ID} + ``` + +1. Enter the repo folder and copy over the service catalogs files from `5-app-infra/source_repos/service-catalog` folder. 
+ + ```bash + cd service-catalog/ + git checkout -b main + + cp -RT ../terraform-google-enterprise-genai/5-app-infra/source_repos/service-catalog/ . + git add img + git commit -m "Add img directory" + ``` + +1. Commit changes and push main branch to the new repo. + + ```bash + git add modules + git commit -m 'Initialize Service Catalog Build Repo' + + git push --set-upstream origin main + ``` + +1. `cd` out of the `service-catalog` repository. + + ```bash + cd .. + ``` + +1. Navigate to the project that was output from `${ARTIFACT_PROJECT_ID}` in Google's Cloud Console to view the first run of images being built. diff --git a/5-app-infra/1-artifact-publish/modules/publish_artifacts/data.tf b/5-app-infra/modules/publish_artifacts/data.tf similarity index 100% rename from 5-app-infra/1-artifact-publish/modules/publish_artifacts/data.tf rename to 5-app-infra/modules/publish_artifacts/data.tf diff --git a/5-app-infra/1-artifact-publish/modules/publish_artifacts/locals.tf b/5-app-infra/modules/publish_artifacts/locals.tf similarity index 100% rename from 5-app-infra/1-artifact-publish/modules/publish_artifacts/locals.tf rename to 5-app-infra/modules/publish_artifacts/locals.tf diff --git a/5-app-infra/1-artifact-publish/modules/publish_artifacts/main.tf b/5-app-infra/modules/publish_artifacts/main.tf similarity index 100% rename from 5-app-infra/1-artifact-publish/modules/publish_artifacts/main.tf rename to 5-app-infra/modules/publish_artifacts/main.tf diff --git a/5-app-infra/1-artifact-publish/modules/publish_artifacts/outputs.tf b/5-app-infra/modules/publish_artifacts/outputs.tf similarity index 100% rename from 5-app-infra/1-artifact-publish/modules/publish_artifacts/outputs.tf rename to 5-app-infra/modules/publish_artifacts/outputs.tf diff --git a/5-app-infra/1-artifact-publish/modules/publish_artifacts/variables.tf b/5-app-infra/modules/publish_artifacts/variables.tf similarity index 100% rename from 
5-app-infra/1-artifact-publish/modules/publish_artifacts/variables.tf rename to 5-app-infra/modules/publish_artifacts/variables.tf diff --git a/5-app-infra/1-artifact-publish/modules/publish_artifacts/versions.tf b/5-app-infra/modules/publish_artifacts/versions.tf similarity index 100% rename from 5-app-infra/1-artifact-publish/modules/publish_artifacts/versions.tf rename to 5-app-infra/modules/publish_artifacts/versions.tf diff --git a/5-app-infra/3-service-catalog/modules/svc_ctlg/data.tf b/5-app-infra/modules/service_catalog/data.tf similarity index 100% rename from 5-app-infra/3-service-catalog/modules/svc_ctlg/data.tf rename to 5-app-infra/modules/service_catalog/data.tf diff --git a/5-app-infra/3-service-catalog/modules/svc_ctlg/locals.tf b/5-app-infra/modules/service_catalog/locals.tf similarity index 100% rename from 5-app-infra/3-service-catalog/modules/svc_ctlg/locals.tf rename to 5-app-infra/modules/service_catalog/locals.tf diff --git a/5-app-infra/3-service-catalog/modules/svc_ctlg/main.tf b/5-app-infra/modules/service_catalog/main.tf similarity index 100% rename from 5-app-infra/3-service-catalog/modules/svc_ctlg/main.tf rename to 5-app-infra/modules/service_catalog/main.tf diff --git a/5-app-infra/3-service-catalog/modules/svc_ctlg/outputs.tf b/5-app-infra/modules/service_catalog/outputs.tf similarity index 100% rename from 5-app-infra/3-service-catalog/modules/svc_ctlg/outputs.tf rename to 5-app-infra/modules/service_catalog/outputs.tf diff --git a/5-app-infra/3-service-catalog/modules/svc_ctlg/variables.tf b/5-app-infra/modules/service_catalog/variables.tf similarity index 100% rename from 5-app-infra/3-service-catalog/modules/svc_ctlg/variables.tf rename to 5-app-infra/modules/service_catalog/variables.tf diff --git a/5-app-infra/projects/artifact-publish/README.md b/5-app-infra/projects/artifact-publish/README.md deleted file mode 100644 index eb6ed8bc..00000000 --- a/5-app-infra/projects/artifact-publish/README.md +++ /dev/null @@ -1,344 +0,0 @@ 
-# 5-app-infra - -This repo is part of a multi-part guide that shows how to configure and deploy -the example.com reference architecture described in -[Google Cloud security foundations guide](https://cloud.google.com/architecture/security-foundations). The following table lists the parts of the guide. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
0-bootstrapBootstraps a Google Cloud organization, creating all the required resources -and permissions to start using the Cloud Foundation Toolkit (CFT). This -step also configures a CI/CD Pipeline for foundations code in subsequent -stages.
1-orgSets up top-level shared folders, monitoring and networking projects, -organization-level logging, and baseline security settings through -organizational policies.
2-environmentsSets up development, non-production, and production environments within the -Google Cloud organization that you've created.
3-networks-dual-svpcSets up base and restricted shared VPCs with default DNS, NAT (optional), -Private Service networking, VPC service controls, on-premises Dedicated -Interconnect, and baseline firewall rules for each environment. It also sets -up the global DNS hub.
3-networks-hub-and-spokeSets up base and restricted shared VPCs with all the default configuration -found on step 3-networks-dual-svpc, but here the architecture will be based on the -Hub and Spoke network model. It also sets up the global DNS hub
4-projectsSets up a folder structure, projects, and an application infrastructure pipeline for applications, - which are connected as service projects to the shared VPC created in the previous stage.
5-app-infra - projects/artifact-publish(this file)Deploys Composer and a pipeline
- -For an overview of the architecture and the parts, see the -[terraform-google-enterprise-genai README](https://github.com/terraform-google-modules/terraform-google-enterprise-genai) -file. - -## Purpose - -The purpose of this step is to deploy out an artifact registry to store custom docker images. A Cloud Build pipeline is also deployed out. At the time of this writing, it is configured to attach itself to a Cloud Source Repository. The Cloud Build pipeline is responsible for building out a custom image that may be used in Machine Learning Workflows. If you are in a situation where company policy requires no outside repositories to be accessed, custom images can be used to keep access to any image internally. - -Since every workflow will have access to these images, it is deployed in the `common` folder, and keeping with the foundations structure, is listed as `shared` under this Business Unit. It will only need to be deployed once. - -The Pipeline is connected to a GitHub repsository with a simple structure: - -``` -├── README.md -└── images - ├── tf2-cpu.2-13:0.1 - │   └── Dockerfile - └── tf2-gpu.2-13:0.1 - └── Dockerfile -``` -for the purposes of this example, the pipeline is configured to monitor the `main` branch of this repository. - -each folder under `images` has the full name and tag of the image that must be built. Once a change to the `main` branch is pushed, the pipeline will analyse which files have changed and build that image out and place it in the artifact repository. For example, if there is a change to the Dockerfile in the `tf2-cpu-13:0.1` folder, or if the folder itself has been renamed, it will build out an image and tag it based on the folder name that the Dockerfile has been housed in. 
- -Once pushed, the pipeline can be accessed by navigating to the project name created in step-4: - -```bash -terraform -chdir="../terraform-google-enterprise-genai/4-projects/business_unit_3/shared/" output -raw common_artifacts_project_id -``` - -## Prerequisites - -1. 0-bootstrap executed successfully. -1. 1-org executed successfully. -1. 2-environments executed successfully. -1. 3-networks executed successfully. -1. 4-projects executed successfully. - -### Troubleshooting - -Please refer to [troubleshooting](../docs/TROUBLESHOOTING.md) if you run into issues during this step. - -## Usage - -**Note:** If you are using MacOS, replace `cp -RT` with `cp -R` in the relevant -commands. The `-T` flag is needed for Linux, but causes problems for MacOS. - -### Deploying with Cloud Build - -1. Clone the `gcp-policies` repo based on the Terraform output from the `0-bootstrap` step. -Clone the repo at the same level of the `terraform-google-enterprise-genai` folder, the following instructions assume this layout. -Run `terraform output cloudbuild_project_id` in the `0-bootstrap` folder to get the Cloud Build Project ID. - - ```bash - export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="gcp-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${INFRA_PIPELINE_PROJECT_ID} - - gcloud source repos clone gcp-policies gcp-policies-app-infra --project=${INFRA_PIPELINE_PROJECT_ID} - ``` - - **Note:** `gcp-policies` repo has the same name as the repo created in step `1-org`. In order to prevent a collision, the previous command will clone this repo in the folder `gcp-policies-app-infra`. - -1. Navigate into the repo and copy contents of policy-library to new repo. All subsequent steps assume you are running them - from the gcp-policies-app-infra directory. If you run them from another directory, - adjust your copy paths accordingly. - - ```bash - cd gcp-policies-app-infra - git checkout -b main - - cp -RT ../terraform-google-enterprise-genai/policy-library/ . 
- ``` - -1. Commit changes and push your main branch to the new repo. - - ```bash - git add . - git commit -m 'Initialize policy library repo' - - git push --set-upstream origin main - ``` - -1. Navigate out of the repo. - - ```bash - cd .. - ``` - -1. Clone the `bu3-artifact-publish` repo. - - ```bash - gcloud source repos clone bu3-artifact-publish --project=${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. Navigate into the repo, change to non-main branch and copy contents of foundation to new repo. - All subsequent steps assume you are running them from the bu3-artifact-publisg directory. - If you run them from another directory, adjust your copy paths accordingly. - - ```bash - cd bu3-artifact-publish - git checkout -b plan - - cp -RT ../terraform-google-enterprise-genai/5-app-infra/projects/artifact-publish/ . - cp ../terraform-google-enterprise-genai/build/cloudbuild-tf-* . - cp ../terraform-google-enterprise-genai/build/tf-wrapper.sh . - chmod 755 ./tf-wrapper.sh - ``` - -1. Rename `common.auto.example.tfvars` to `common.auto.tfvars`. - - ```bash - mv common.auto.example.tfvars common.auto.tfvars - ``` - -1. Update the file with values from your environment and 0-bootstrap. See any of the business unit 1 envs folders [README.md](./business_unit_1/production/README.md) files for additional information on the values in the `common.auto.tfvars` file. - - ```bash - export remote_state_bucket=$(terraform -chdir="../terraform-google-enterprise-genai/0-bootstrap/" output -raw projects_gcs_bucket_tfstate) - echo "remote_state_bucket = ${remote_state_bucket}" - sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. 
- - ```bash - export backend_bucket=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-artifact-publish"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` - -1. Commit changes. - - ```bash - git add . - git commit -m 'Initialize repo' - ``` - -1. Push your plan branch to trigger a plan for all environments. Because the - _plan_ branch is not a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), pushing your _plan_ - branch triggers _terraform plan_ but not _terraform apply_. Review the plan output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ```bash - git push --set-upstream origin plan - ``` - -1. Merge changes to shared. Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), - pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ```bash - git checkout -b shared - git push origin shared - ``` - -## Post deployment -1. `cd` out of the `artifacts-publish` repository. - -1. Grab the Artifact Project ID - ```shell - export ARTIFACT_PROJECT_ID=$(terraform -chdir="gcp-projects/business_unit_3/shared" output -raw common_artifacts_project_id) - echo ${ARTIFACT_PROJECT_ID} - ``` - -1. Clone the freshly minted Cloud Source Repository that was created for this project. - ```shell - gcloud source repos clone publish-artifacts --project=${ARTIFACT_PROJECT_ID} - ``` -1. Enter the repo folder and copy over the artifact files from `5-app-infra/source_repos` folder. - ```shell - cd publish-artifacts - cp -r ../ml-foundations/5-app-infra/source_repos/artifact-publish/* . - ``` -1. 
Commit changes and push your main branch to the new repo. - ```shell - git add . - git commit -m 'Initialize Artifact Build Repo' - - git push --set-upstream origin main - ``` -1. Navigate to the project that was output from `${ARTIFACT_PROJECT_ID}` in Google's Cloud Console to view the first run of images being built. - - -### Run Terraform locally - -1. The next instructions assume that you are at the same level of the `terraform-google-enterprise-genai` folder. Change into `5-app-infra` folder, copy the Terraform wrapper script and ensure it can be executed. - - ```bash - cd terraform-google-enterprise-genai/5-app-infra/projects/artifact-publish - cp ../../../build/tf-wrapper.sh . - chmod 755 ./tf-wrapper.sh - ``` - -1. Rename `common.auto.example.tfvars` files to `common.auto.tfvars`. - - ```bash - mv common.auto.example.tfvars common.auto.tfvars - ``` - -1. Update `common.auto.tfvars` file with values from your environment. -1. Use `terraform output` to get the project backend bucket value from 0-bootstrap. - - ```bash - export remote_state_bucket=$(terraform -chdir="../0-bootstrap/" output -raw projects_gcs_bucket_tfstate) - echo "remote_state_bucket = ${remote_state_bucket}" - sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. - - ```bash - export backend_bucket=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-artifact-publish"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. 
- - ```bash - export backend_bucket=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-artifact-publish"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` - -1. Provide the user that will be running `./tf-wrapper.sh` the Service Account Token Creator role to the bu3 Terraform service account. -1. Provide the user permissions to run the terraform locally with the `serviceAccountTokenCreator` permission. - - ```bash - member="user:$(gcloud auth list --filter="status=ACTIVE" --format="value(account)")" - echo ${member} - - project_id=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${project_id} - - terraform_sa=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-artifact-publish"' --raw-output) - echo ${terraform_sa} - - gcloud iam service-accounts add-iam-policy-binding ${terraform_sa} --project ${project_id} --member="${member}" --role="roles/iam.serviceAccountTokenCreator" - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. - - ```bash - export backend_bucket=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-artifact-publish"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` - -We will now deploy each of our environments (development/production/non-production) using this script. -When using Cloud Build or Jenkins as your CI/CD tool, each environment corresponds to a branch in the repository for the `5-app-infra` step. Only the corresponding environment is applied. 
- -To use the `validate` option of the `tf-wrapper.sh` script, please follow the [instructions](https://cloud.google.com/docs/terraform/policy-validation/validate-policies#install) to install the terraform-tools component. - -1. Use `terraform output` to get the Infra Pipeline Project ID from 4-projects output. - - ```bash - export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${INFRA_PIPELINE_PROJECT_ID} - - export GOOGLE_IMPERSONATE_SERVICE_ACCOUNT=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-artifact-publish"' --raw-output) - echo ${GOOGLE_IMPERSONATE_SERVICE_ACCOUNT} - ``` - -1. Run `init` and `plan` and review output for environment shared (common). - - ```bash - ./tf-wrapper.sh init shared - ./tf-wrapper.sh plan shared - ``` - -1. Run `validate` and check for violations. - - ```bash - ./tf-wrapper.sh validate shared $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. Run `apply` shared. - - ```bash - ./tf-wrapper.sh apply shared - ``` - - -If you received any errors or made any changes to the Terraform config or `common.auto.tfvars` you must re-run `./tf-wrapper.sh plan ` before running `./tf-wrapper.sh apply `. - -After executing this stage, unset the `GOOGLE_IMPERSONATE_SERVICE_ACCOUNT` environment variable. 
- -```bash -unset GOOGLE_IMPERSONATE_SERVICE_ACCOUNT -``` - diff --git a/5-app-infra/projects/artifact-publish/business_unit_3/shared/publish_artifacts.tf b/5-app-infra/projects/artifact-publish/business_unit_3/shared/publish_artifacts.tf index 5c20479e..5983f92c 100644 --- a/5-app-infra/projects/artifact-publish/business_unit_3/shared/publish_artifacts.tf +++ b/5-app-infra/projects/artifact-publish/business_unit_3/shared/publish_artifacts.tf @@ -15,7 +15,7 @@ */ module "artifact_publish" { - source = "../../modules/publish_artifacts" + source = "../../../../modules/publish_artifacts" environment = local.environment description = "Publish Artifacts for ML Projects" diff --git a/5-app-infra/projects/artifact-publish/modules/publish_artifacts/data.tf b/5-app-infra/projects/artifact-publish/modules/publish_artifacts/data.tf deleted file mode 100644 index afd1ef09..00000000 --- a/5-app-infra/projects/artifact-publish/modules/publish_artifacts/data.tf +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Copyright 2023 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -data "google_project" "project" { - project_id = var.project_id -} - -data "google_projects" "kms" { - filter = "labels.application_name:org-kms labels.environment:production lifecycleState:ACTIVE" - # filter = "labels.application_name:env-kms labels.environment:${data.google_project.project.labels.environment} lifecycleState:ACTIVE" -} - -data "google_kms_key_ring" "kms" { - name = "sample-keyring" - location = var.region - project = data.google_projects.kms.projects.0.project_id -} - -data "google_kms_crypto_key" "key" { - name = data.google_project.project.name - key_ring = data.google_kms_key_ring.kms.id -} diff --git a/5-app-infra/projects/artifact-publish/modules/publish_artifacts/locals.tf b/5-app-infra/projects/artifact-publish/modules/publish_artifacts/locals.tf deleted file mode 100644 index 2e394621..00000000 --- a/5-app-infra/projects/artifact-publish/modules/publish_artifacts/locals.tf +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Copyright 2023 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -locals { - env_code = substr(var.environment, 0, 1) - name_var = format("%s-%s", local.env_code, var.name) - # key_ring_var = "projects/${var.cmek_project_id}/locations/${var.region}/keyRings/sample-keyring" - region_short_code = { - "us-central1" = "usc1" - "us-east4" = "use4" - } - # github_owner = split("/", split("https://github.com/", var.github_remote_uri)[1])[0] - # github_repo_name = trim(basename(var.github_remote_uri), ".git") - - trigger_sa_roles = [ - "roles/artifactregistry.reader", - "roles/artifactregistry.writer", - ] - # github_repository = replace(var.github_remote_uri, "https://", "") -} diff --git a/5-app-infra/projects/artifact-publish/modules/publish_artifacts/main.tf b/5-app-infra/projects/artifact-publish/modules/publish_artifacts/main.tf deleted file mode 100644 index 5f5e1293..00000000 --- a/5-app-infra/projects/artifact-publish/modules/publish_artifacts/main.tf +++ /dev/null @@ -1,162 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -resource "google_project_service_identity" "artifact_registry_agent" { - provider = google-beta - - project = var.project_id - service = "artifactregistry.googleapis.com" -} - -resource "google_kms_crypto_key_iam_member" "artifact-kms-key-binding" { - crypto_key_id = data.google_kms_crypto_key.key.id - role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" - member = "serviceAccount:${google_project_service_identity.artifact_registry_agent.email}" -} - -resource "google_artifact_registry_repository" "repo" { - provider = google-beta - location = var.region - repository_id = local.name_var - description = var.description - format = var.format - cleanup_policy_dry_run = var.cleanup_policy_dry_run - project = data.google_project.project.project_id - - #Customer Managed Encryption Keys - #Control ID: COM-CO-2.3 - #NIST 800-53: SC-12 SC-13 - #CRI Profile: PR.DS-1.1 PR.DS-1.2 PR.DS-2.1 PR.DS-2.2 PR.DS-5.1 - - kms_key_name = data.google_kms_crypto_key.key.id - - #Cleanup policy - #Control ID: AR-CO-6.1 - #NIST 800-53: SI-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - dynamic "cleanup_policies" { - for_each = var.cleanup_policies - content { - id = cleanup_policies.value.id - action = cleanup_policies.value.action - - dynamic "condition" { - for_each = cleanup_policies.value.condition != null ? [cleanup_policies.value.condition] : [] - content { - tag_state = condition.value[0].tag_state - tag_prefixes = condition.value[0].tag_prefixes - package_name_prefixes = condition.value[0].package_name_prefixes - older_than = condition.value[0].older_than - } - } - - dynamic "most_recent_versions" { - for_each = cleanup_policies.value.most_recent_versions != null ? 
[cleanup_policies.value.most_recent_versions] : [] - content { - package_name_prefixes = most_recent_versions.value[0].package_name_prefixes - keep_count = most_recent_versions.value[0].keep_count - } - } - } - } - depends_on = [ - google_kms_crypto_key_iam_member.artifact-kms-key-binding, - - ] -} -resource "google_artifact_registry_repository_iam_member" "project" { - for_each = toset(local.trigger_sa_roles) - project = var.project_id - repository = google_artifact_registry_repository.repo.name - location = var.region - role = each.key - # member = "serviceAccount:${google_service_account.trigger_sa.email}" - member = "serviceAccount:${data.google_project.project.number}@cloudbuild.gserviceaccount.com" -} - -# resource "google_sourcerepo_repository" "artifact_repo" { -# project = var.project_id -# name = var.name -# } -resource "google_cloudbuild_trigger" "docker_build" { - name = "docker-build" - project = var.project_id - location = var.region - - trigger_template { - branch_name = "^main$" - repo_name = var.name - } - build { - step { - id = "unshallow" - name = "gcr.io/cloud-builders/git" - entrypoint = "/bin/bash" - args = [ - "-c", - "git fetch --unshallow" - ] - } - step { - id = "select-folder" - name = "gcr.io/cloud-builders/git" - entrypoint = "/bin/bash" - args = [ - "-c", - <<-EOT - changed_files=$(git diff $${COMMIT_SHA}^1 --name-only -r) - changed_folders=$(echo "$changed_files" | awk -F/ '{print $2}' | sort | uniq ) - - for folder in $changed_folders; do - echo "Found docker folder: $folder" - echo $folder >> /workspace/docker_build - done - EOT - ] - } - step { - id = "build-image" - wait_for = ["select-folder"] - name = "gcr.io/cloud-builders/docker" - entrypoint = "/bin/bash" - args = [ - "-c", - <<-EOT - build_path="/workspace/docker_build" - while IFS= read -r line; do - docker build -t gcr.io/$PROJECT_ID/$line images/$line - done < "$build_path" - EOT - ] - } - - step { - id = "push-image" - wait_for = ["select-folder", "build-image"] - name 
= "gcr.io/cloud-builders/docker" - entrypoint = "/bin/bash" - args = [ - "-c", - <<-EOT - build_path="/workspace/docker_build" - while IFS= read -r line; do - docker push gcr.io/$PROJECT_ID/$line - done < "$build_path" - EOT - ] - } - } -} diff --git a/5-app-infra/projects/artifact-publish/modules/publish_artifacts/outputs.tf b/5-app-infra/projects/artifact-publish/modules/publish_artifacts/outputs.tf deleted file mode 100644 index 668f4e96..00000000 --- a/5-app-infra/projects/artifact-publish/modules/publish_artifacts/outputs.tf +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -output "cloudbuild_trigger_id" { - value = google_cloudbuild_trigger.docker_build.id -} - diff --git a/5-app-infra/projects/artifact-publish/modules/publish_artifacts/variables.tf b/5-app-infra/projects/artifact-publish/modules/publish_artifacts/variables.tf deleted file mode 100644 index 28e68618..00000000 --- a/5-app-infra/projects/artifact-publish/modules/publish_artifacts/variables.tf +++ /dev/null @@ -1,71 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -variable "region" { - description = "Location of the repository." - type = string -} -variable "name" { - description = "Name of the repository." - type = string -} - -variable "description" { - description = "Description of the repository." - type = string -} - -variable "format" { - description = "Format of the repository." - type = string -} - -variable "cleanup_policy_dry_run" { - description = "Whether to perform a dry run of the cleanup policy." - type = bool - default = false -} - -variable "cleanup_policies" { - description = "List of cleanup policies." - type = list(object({ - id = string - action = optional(string) - condition = optional(list(object({ - tag_state = optional(string) - tag_prefixes = optional(list(string)) - package_name_prefixes = optional(list(string)) - older_than = optional(string) - }))) - most_recent_versions = optional(list(object({ - package_name_prefixes = optional(list(string)) - keep_count = optional(number) - }))) - })) -} - -variable "environment" { - type = string - description = "development | staging | production | commmon" - validation { - condition = contains(["development", "staging", "production", "common"], var.environment) - error_message = "Environment must be one of [development, staging, production]." 
- } -} - -variable "project_id" { - description = "Project ID" -} diff --git a/5-app-infra/projects/artifact-publish/modules/publish_artifacts/versions.tf b/5-app-infra/projects/artifact-publish/modules/publish_artifacts/versions.tf deleted file mode 100644 index 7fb6a704..00000000 --- a/5-app-infra/projects/artifact-publish/modules/publish_artifacts/versions.tf +++ /dev/null @@ -1,46 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -terraform { - required_version = ">= 0.13" - required_providers { - google = { - source = "hashicorp/google" - version = ">= 3.61" - } - google-beta = { - source = "hashicorp/google-beta" - version = ">= 3.61" - } - null = { - source = "hashicorp/null" - version = "~> 3.0" - } - - random = { - source = "hashicorp/random" - version = "~> 3.1" - } - } - - provider_meta "google" { - module_name = "blueprints/terraform/terraform-google-enterprise-genai:app_env_base/v0.0.1" - } - - provider_meta "google-beta" { - module_name = "blueprints/terraform/terraform-google-enterprise-genai:app_env_base/v0.0.1" - } -} diff --git a/5-app-infra/projects/machine-learning/.gitignore b/5-app-infra/projects/machine-learning/.gitignore deleted file mode 100644 index db6c7ac6..00000000 --- a/5-app-infra/projects/machine-learning/.gitignore +++ /dev/null @@ -1,16 +0,0 @@ -# Local .terraform directories -**/.terraform/* -.terraform.lock.hcl - -# .tfstate files -*.tfstate -*.tfstate.* - -# plan file 
-*.tfplan -**/*.tfplan - -# zip file -*.zip -*.tar -*.tar.gz diff --git a/5-app-infra/projects/machine-learning/README.md b/5-app-infra/projects/machine-learning/README.md deleted file mode 100644 index 91bb1602..00000000 --- a/5-app-infra/projects/machine-learning/README.md +++ /dev/null @@ -1,444 +0,0 @@ -# 5-app-infra - -This repo is part of a multi-part guide that shows how to configure and deploy -the example.com reference architecture described in -[Google Cloud security foundations guide](https://cloud.google.com/architecture/security-foundations). The following table lists the parts of the guide. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
0-bootstrapBootstraps a Google Cloud organization, creating all the required resources -and permissions to start using the Cloud Foundation Toolkit (CFT). This -step also configures a CI/CD Pipeline for foundations code in subsequent -stages.
1-orgSets up top-level shared folders, monitoring and networking projects, -organization-level logging, and baseline security settings through -organizational policies.
2-environmentsSets up development, non-production, and production environments within the -Google Cloud organization that you've created.
3-networks-dual-svpcSets up base and restricted shared VPCs with default DNS, NAT (optional), -Private Service networking, VPC service controls, on-premises Dedicated -Interconnect, and baseline firewall rules for each environment. It also sets -up the global DNS hub.
3-networks-hub-and-spokeSets up base and restricted shared VPCs with all the default configuration -found on step 3-networks-dual-svpc, but here the architecture will be based on the -Hub and Spoke network model. It also sets up the global DNS hub
4-projectsSets up a folder structure, projects, and an application infrastructure pipeline for applications, - which are connected as service projects to the shared VPC created in the previous stage.
5-app-infra - projects/machine-learning(this file)Deploys Notebooks for Vertex AI
- -For an overview of the architecture and the parts, see the -[terraform-google-enterprise-genai README](https://github.com/terraform-google-modules/terraform-google-enterprise-genai) -file. - -## Purpose - -\*N.B\* - fill in here - - -## Prerequisites - -1. 0-bootstrap executed successfully. -1. 1-org executed successfully. -1. 2-environments executed successfully. -1. 3-networks executed successfully. -1. 4-projects executed successfully. - -### Troubleshooting - -Please refer to [troubleshooting](../docs/TROUBLESHOOTING.md) if you run into issues during this step. - -## Usage - -**Note:** If you are using MacOS, replace `cp -RT` with `cp -R` in the relevant -commands. The `-T` flag is needed for Linux, but causes problems for MacOS. - -You will need a github repository set up for this step. This repository houses the DAG's for composer. As of this writing, the structure is as follows: - ``` - . - ├── README.md - └── dags - ├── hello_world.py - └── strings.py - ``` -Add in your dags in the `dags` folder. Any changes to this folder will trigger a pipeline and place the dags in the appropriate composer environment depending on which branch it is pushed to (`development`, `non-production`, `production`) - -Have a github token for access to your repository ready, along with an [Application Installation Id](https://cloud.google.com/build/docs/automating-builds/github/connect-repo-github#connecting_a_github_host_programmatically) and the remote uri to your repository. - -These environmental project inflations are closely tied to the `service-catalog` project that have already deployed. By now, the `bu3-service-catalog` should have been inflated. `service-catalog` contains modules that are being deployed in an interactive (development) environment. Since they already exist; they can be used as terraform modules for operational (non-production, production) environments. This was done in order to avoid code redundancy. One area for all `machine-learning` deployments. 
- -Under `modules/base_env/main.tf` you will notice all module calls are using `git` links as sources. These links refer to the `service-catalog` cloud source repository we have already set up. - -Step 12 in "Deploying with Cloud Build" highlights the necessary steps needed to point the module resources to the correct location. - -### Deploying with Cloud Build - -1. Clone the `gcp-policies` repo based on the Terraform output from the `0-bootstrap` step. -Clone the repo at the same level of the `terraform-google-enterprise-genai` folder, the following instructions assume this layout. -Run `terraform output cloudbuild_project_id` in the `0-bootstrap` folder to get the Cloud Build Project ID. - - ```bash - export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="gcp-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${INFRA_PIPELINE_PROJECT_ID} - - gcloud source repos clone gcp-policies gcp-policies-app-infra --project=${INFRA_PIPELINE_PROJECT_ID} - ``` - - **Note:** `gcp-policies` repo has the same name as the repo created in step `1-org`. In order to prevent a collision, the previous command will clone this repo in the folder `gcp-policies-app-infra`. - -1. Navigate into the repo and copy contents of policy-library to new repo. All subsequent steps assume you are running them - from the gcp-policies-app-infra directory. If you run them from another directory, - adjust your copy paths accordingly. - - ```bash - cd gcp-policies-app-infra - git checkout -b main - - cp -RT ../terraform-google-enterprise-genai/policy-library/ . - ``` - -1. Commit changes and push your main branch to the new repo. - - ```bash - git add . - git commit -m 'Initialize policy library repo' - - git push --set-upstream origin main - ``` - -1. Navigate out of the repo. - - ```bash - cd .. - ``` - -1. Clone the `bu3-machine-learning` repo. - - ```bash - gcloud source repos clone bu3-machine-learning --project=${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. 
Navigate into the repo, change to non-main branch and copy contents of foundation to new repo. - All subsequent steps assume you are running them from the bu3-machine-learning directory. - If you run them from another directory, adjust your copy paths accordingly. - - ```bash - cd bu3-machine-learning - git checkout -b plan - - cp -RT ../terraform-google-enterprise-genai/5-app-infra/projects/machine-learning/ . - cp ../terraform-google-enterprise-genai/build/cloudbuild-tf-* . - cp ../terraform-google-enterprise-genai/build/tf-wrapper.sh . - chmod 755 ./tf-wrapper.sh - ``` - -1. Rename `common.auto.example.tfvars` to `common.auto.tfvars`. - - ```bash - mv common.auto.example.tfvars common.auto.tfvars - ``` - -1. Update the `common.auto.tfvars` file with your github app installation id, along with the url of your repository. - -1. Use `terraform output` to get the project backend bucket value from 0-bootstrap. - - ```bash - export remote_state_bucket=$(terraform -chdir="../terraform-google-enterprise-genai/0-bootstrap/" output -raw projects_gcs_bucket_tfstate) - echo "remote_state_bucket = ${remote_state_bucket}" - sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. - - ```bash - export backend_bucket=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-machine-learning"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - ## Linux - for i in `find . -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - - ## MacOS - for i in `find . -name 'backend.tf'`; do sed -i "" "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` -1. 
Update `modules/base_env/main.tf` with the name of service catalog project id to complete the git fqdn for module sources: - ```bash - export service_catalog_project_id=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -raw service_catalog_project_id) - - ##LINUX - sed -i "s/SERVICE-CATALOG-PROJECT-ID/${service_catalog_project_id}/" ./modules/base_env/main.tf - - ##MacOS - sed -i "" "s/SERVICE-CATALOG-PROJECT-ID/${service_catalog_project_id}/" ./modules/base_env/main.tf - ``` -1. Commit changes. - - ```bash - git add . - git commit -m 'Initialize repo' - ``` -1. Composer will rely on DAG's from a github repository. In `4-projects`, a secret 'github-api-token' was created to house your github's api access key. We need to create a new version for this secret which will be used in the composer module which is called in the `base_env` folder. Use the script below to add the secrets into each machine learnings respective environment: - ```bash - envs=(development non-production production) - project_ids=() - github_token = "YOUR-GITHUB-TOKEN" - - for env in "${envs[@]}"; do - output=$(terraform -chdir="../gcp-projects/business_unit_3/${env}" output -raw machine_learning_project_id) - project_ids+=("$output") - done - - for project in "${project_ids[@]}"; do - echo -n $github_token | gcloud secrets versions add github-api-token --data-file=- --project=${project} - done - ``` -1. Push your plan branch to trigger a plan for all environments. Because the - _plan_ branch is not a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), pushing your _plan_ - branch triggers _terraform plan_ but not _terraform apply_. Review the plan output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ```bash - git push --set-upstream origin plan - ``` - -1. Merge changes to development. 
Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), - pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ``` - git checkout -b development - git push origin development - ``` - -1. Merge changes to non-production. Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), - pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ```bash - git checkout -b non-production - git push origin non-production - ``` - -1. Merge changes to production branch. Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), - pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ```bash - git checkout -b production - git push origin production - ``` - -## Post Deployment - -Since this project is in a service perimiter, there will be _additional_ entries that will be needed. This is most notable for the `interactive` environment (development). Since many of the necessary service agents and permissions were deployed in this project, we will _need to return to `3-networks`_ and add in more agents to the DEVELOPMENT.AUTO.TFVARS file under `egress_policies`. -Notably: - - * "serviceAccount:bq-[prj-d-bu3machine-learning-project-number]@bigquery-encryption.iam.gserviceaccount.com" - -This should be added under identities. 
It should look like this:: - ``` - egress_policies = [ - // notebooks - { - "from" = { - "identity_type" = "" - "identities" = [ - "serviceAccount:bq-[prj-d-bu3machine-learning-project-number]@bigquery-encryption.iam.gserviceaccount.com" << New Addition - "serviceAccount:service-[prj-d-bu3machine-learning-project-number]@gcp-sa-notebooks.iam.gserviceaccount.com", - "serviceAccount:service-[prj-d-bu3machine-learning-project-number]@compute-system.iam.gserviceaccount.com", - ] - }, - "to" = { - "resources" = ["projects/[prj-d-kms-project-number]"] - "operations" = { - "compute.googleapis.com" = { - "methods" = ["*"] - } - "cloudkms.googleapis.com" = { - "methods" = ["*"] - } - } - } - }, - ] - ``` - -SERVICE CATALOG: -Once you have set up service catalog and attempt to deploy out terraform code, there is a high chance you will encounter this error: -`Permission denied; please check you have the correct IAM permissions and APIs enabled.` -This is due to a VPC Service control error that until now, is impossible to add into the egress policy. Go to `prj-d-bu3machine-learning` project and view the logs. There will be a VPC Service Controls entry that has an `egressViolation`. It should look something like this: -``` -egressViolations: [ - 0: { - servicePerimeter: "accessPolicies/1066661933618/servicePerimeters/sp_d_shared_restricted_default_perimeter_f3fv" - source: "projects/[machine-learning-project-number]" - sourceType: "Resource" - targetResource: "projects/[unknown-project-number]" - } -] -``` -we want the `unknown-project-number` here. Add this into your `egress_policies` in `3-networks` under DEVELOPMENT.AUTO.TFVARS -``` -// Service Catalog - { - "from" = { - "identity_type" = "ANY_IDENTITY" - "identities" = [] - }, - "to" = { - "resources" = ["projects/[unknown-project-number]"] - "operations" = { - "cloudbuild.googleapis.com" = { - "methods" = ["*"] - } - } - } - }, -``` - -## Running Terraform locally - -1. 
The next instructions assume that you are at the same level of the `terraform-google-enterprise-genai` folder. Change into `5-app-infra` folder, copy the Terraform wrapper script and ensure it can be executed. - - ```bash - cd terraform-google-enterprise-genai/5-app-infra/projects/machine-learning - cp ../../../build/tf-wrapper.sh . - chmod 755 ./tf-wrapper.sh - ``` - -1. Rename `common.auto.example.tfvars` files to `common.auto.tfvars`. - - ```bash - mv common.auto.example.tfvars common.auto.tfvars - ``` - -1. Update `common.auto.tfvars` file with values from your environment. -1. Use `terraform output` to get the project backend bucket value from 0-bootstrap. - - ```bash - export remote_state_bucket=$(terraform -chdir="../0-bootstrap/" output -raw projects_gcs_bucket_tfstate) - echo "remote_state_bucket = ${remote_state_bucket}" - sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars - ``` - -1. Provide the user that will be running `./tf-wrapper.sh` the Service Account Token Creator role to the bu3 Terraform service account. -1. Provide the user permissions to run the terraform locally with the `serviceAccountTokenCreator` permission. - - ```bash - member="user:$(gcloud auth list --filter="status=ACTIVE" --format="value(account)")" - echo ${member} - - project_id=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${project_id} - - terraform_sa=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-machine-learning"' --raw-output) - echo ${terraform_sa} - - gcloud iam service-accounts add-iam-policy-binding ${terraform_sa} --project ${project_id} --member="${member}" --role="roles/iam.serviceAccountTokenCreator" - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. 
- - ```bash - export backend_bucket=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-machine-learning"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` - -We will now deploy each of our environments (development/production/non-production) using this script. -When using Cloud Build or Jenkins as your CI/CD tool, each environment corresponds to a branch in the repository for the `5-app-infra` step. Only the corresponding environment is applied. - -To use the `validate` option of the `tf-wrapper.sh` script, please follow the [instructions](https://cloud.google.com/docs/terraform/policy-validation/validate-policies#install) to install the terraform-tools component. - -1. Use `terraform output` to get the Infra Pipeline Project ID from 4-projects output. - - ```bash - export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${INFRA_PIPELINE_PROJECT_ID} - - export GOOGLE_IMPERSONATE_SERVICE_ACCOUNT=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-machine-learning"' --raw-output) - echo ${GOOGLE_IMPERSONATE_SERVICE_ACCOUNT} - ``` - -1. Run `init` and `plan` and review output for environment production. - - ```bash - ./tf-wrapper.sh init production - ./tf-wrapper.sh plan production - ``` - -1. Run `validate` and check for violations. - - ```bash - ./tf-wrapper.sh validate production $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. Run `apply` production. - - ```bash - ./tf-wrapper.sh apply production - ``` - -1. Run `init` and `plan` and review output for environment non-production. - - ```bash - ./tf-wrapper.sh init non-production - ./tf-wrapper.sh plan non-production - ``` - -1. Run `validate` and check for violations. 
- - ```bash - ./tf-wrapper.sh validate non-production $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. Run `apply` non-production. - - ```bash - ./tf-wrapper.sh apply non-production - ``` - -1. Run `init` and `plan` and review output for environment development. - - ```bash - ./tf-wrapper.sh init development - ./tf-wrapper.sh plan development - ``` - -1. Run `validate` and check for violations. - - ```bash - ./tf-wrapper.sh validate development $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. Run `apply` development. - - ```bash - ./tf-wrapper.sh apply development - ``` - -If you received any errors or made any changes to the Terraform config or `common.auto.tfvars` you must re-run `./tf-wrapper.sh plan ` before running `./tf-wrapper.sh apply `. - -After executing this stage, unset the `GOOGLE_IMPERSONATE_SERVICE_ACCOUNT` environment variable. - -```bash -unset GOOGLE_IMPERSONATE_SERVICE_ACCOUNT -``` diff --git a/5-app-infra/projects/machine-learning/business_unit_3/development/README.md b/5-app-infra/projects/machine-learning/business_unit_3/development/README.md deleted file mode 100644 index a782b0fb..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/development/README.md +++ /dev/null @@ -1,15 +0,0 @@ - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| github\_app\_installation\_id | The app installation ID that was created when installing Google Cloud Build in Github: https://github.com/apps/google-cloud-build | `number` | n/a | yes | -| github\_remote\_uri | The remote uri of your github repository | `string` | n/a | yes | -| instance\_region | The region where notebook instance will be created. A subnetwork must exists in the instance region. | `string` | n/a | yes | -| remote\_state\_bucket | Backend bucket to load remote state information from previous steps. | `string` | n/a | yes | - -## Outputs - -No outputs. 
- - diff --git a/5-app-infra/projects/machine-learning/business_unit_3/development/backend.tf b/5-app-infra/projects/machine-learning/business_unit_3/development/backend.tf deleted file mode 100644 index 60060517..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/development/backend.tf +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -terraform { - backend "gcs" { - bucket = "UPDATE_APP_INFRA_BUCKET" - prefix = "terraform/app-infra/business_unit_3/development" - } -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/development/common.auto.tfvars b/5-app-infra/projects/machine-learning/business_unit_3/development/common.auto.tfvars deleted file mode 120000 index 39aaa462..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/development/common.auto.tfvars +++ /dev/null @@ -1 +0,0 @@ -../../common.auto.tfvars \ No newline at end of file diff --git a/5-app-infra/projects/machine-learning/business_unit_3/development/locals.tf b/5-app-infra/projects/machine-learning/business_unit_3/development/locals.tf deleted file mode 100644 index 39b3471f..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/development/locals.tf +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -locals { - business_unit = "business_unit_3" - business_code = "bu3" - env = "development" - environment_code = "d" -} - diff --git a/5-app-infra/projects/machine-learning/business_unit_3/development/main.tf b/5-app-infra/projects/machine-learning/business_unit_3/development/main.tf deleted file mode 100644 index f6874834..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/development/main.tf +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - - -module "base_env" { - source = "../../modules/base_env" - - env = local.env - environment_code = local.environment_code - business_code = local.business_code - project_id = local.machine_learning_project_id - kms_keys = local.machine_learning_kms_keys -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/development/outputs.tf b/5-app-infra/projects/machine-learning/business_unit_3/development/outputs.tf deleted file mode 100644 index 9d277cce..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/development/outputs.tf +++ /dev/null @@ -1,16 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - diff --git a/5-app-infra/projects/machine-learning/business_unit_3/development/remote.tf b/5-app-infra/projects/machine-learning/business_unit_3/development/remote.tf deleted file mode 100644 index dc533538..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/development/remote.tf +++ /dev/null @@ -1,40 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -locals { - machine_learning_project_id = data.terraform_remote_state.projects_env.outputs.machine_learning_project_id - machine_learning_kms_keys = data.terraform_remote_state.projects_env.outputs.machine_learning_kms_keys - service_catalog_repo_name = data.terraform_remote_state.projects_shared.outputs.service_catalog_repo_name - service_catalog_project_id = data.terraform_remote_state.projects_shared.outputs.service_catalog_project_id -} - -data "terraform_remote_state" "projects_env" { - backend = "gcs" - - config = { - bucket = var.remote_state_bucket - prefix = "terraform/projects/${local.business_unit}/${local.env}" - } -} - -data "terraform_remote_state" "projects_shared" { - backend = "gcs" - - config = { - bucket = var.remote_state_bucket - prefix = "terraform/projects/${local.business_unit}/shared" - } -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/development/variables.tf b/5-app-infra/projects/machine-learning/business_unit_3/development/variables.tf deleted file mode 100644 index c26d31ca..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/development/variables.tf +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -variable "instance_region" { - description = "The region where notebook instance will be created. A subnetwork must exists in the instance region." - type = string -} - -variable "remote_state_bucket" { - description = "Backend bucket to load remote state information from previous steps." - type = string -} - -variable "github_app_installation_id" { - description = "The app installation ID that was created when installing Google Cloud Build in Github: https://github.com/apps/google-cloud-build" - type = number - -} -variable "github_remote_uri" { - description = "The remote uri of your github repository" - type = string -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/development/versions.tf b/5-app-infra/projects/machine-learning/business_unit_3/development/versions.tf deleted file mode 100644 index baa38abb..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/development/versions.tf +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Copyright 2022 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -terraform { - required_version = ">= 0.13" - - required_providers { - - google = { - source = "hashicorp/google" - version = ">= 3.77, < 6" - } - - google-beta = { - source = "hashicorp/google-beta" - version = ">= 3.77, < 6" - } - - null = { - source = "hashicorp/null" - version = "~> 3.0" - } - - random = { - source = "hashicorp/random" - version = "~> 3.1" - } - - } -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/non-production/README.md b/5-app-infra/projects/machine-learning/business_unit_3/non-production/README.md deleted file mode 100644 index 3255fd1a..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/non-production/README.md +++ /dev/null @@ -1,20 +0,0 @@ - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| github\_app\_installation\_id | The app installation ID that was created when installing Google Cloud Build in Github: https://github.com/apps/google-cloud-build | `number` | n/a | yes | -| github\_remote\_uri | The remote uri of your github repository | `string` | n/a | yes | -| instance\_region | The region where notebook instance will be created. A subnetwork must exists in the instance region. | `string` | n/a | yes | -| remote\_state\_bucket | Backend bucket to load remote state information from previous steps. | `string` | n/a | yes | - -## Outputs - -| Name | Description | -|------|-------------| -| bucket | Generated unique name fort the bucket | -| composer | Cloud Composer Environment. | -| metadata | an identifier for the resource with format {{name}} | -| tensorboard | TensorBoard resource. 
| - - diff --git a/5-app-infra/projects/machine-learning/business_unit_3/non-production/backend.tf b/5-app-infra/projects/machine-learning/business_unit_3/non-production/backend.tf deleted file mode 100644 index a8c71ae7..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/non-production/backend.tf +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -terraform { - backend "gcs" { - bucket = "UPDATE_APP_INFRA_BUCKET" - prefix = "terraform/app-infra/business_unit_3/non-production" - } -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/non-production/common.auto.tfvars b/5-app-infra/projects/machine-learning/business_unit_3/non-production/common.auto.tfvars deleted file mode 120000 index 39aaa462..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/non-production/common.auto.tfvars +++ /dev/null @@ -1 +0,0 @@ -../../common.auto.tfvars \ No newline at end of file diff --git a/5-app-infra/projects/machine-learning/business_unit_3/non-production/locals.tf b/5-app-infra/projects/machine-learning/business_unit_3/non-production/locals.tf deleted file mode 100644 index 6600d7b7..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/non-production/locals.tf +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in 
compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -locals { - business_unit = "business_unit_3" - business_code = "bu3" - env = "non-production" - environment_code = "n" -} - diff --git a/5-app-infra/projects/machine-learning/business_unit_3/non-production/main.tf b/5-app-infra/projects/machine-learning/business_unit_3/non-production/main.tf deleted file mode 100644 index 980ebbfa..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/non-production/main.tf +++ /dev/null @@ -1,56 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - - -module "base_env" { - source = "../../modules/base_env" - - env = local.env - environment_code = local.environment_code - business_code = local.business_code - project_id = local.machine_learning_project_id - - kms_keys = local.machine_learning_kms_keys - - // Composer - composer_name = "composer" - composer_airflow_config_overrides = { - core-dags_are_paused_at_creation = "true" - } - composer_github_app_installation_id = var.github_app_installation_id - composer_github_remote_uri = var.github_remote_uri - - composer_pypi_packages = { - google-cloud-bigquery = "" - db-dtypes = "" - google-cloud-aiplatform = "" - google-cloud-storage = "" - tensorflow = "" - # tensorflow-io = "" - } - - // BigQuery - big_query_dataset_id = "census_dataset" - - // Metadata - metadata_name = "metadata-store-${local.env}" - - // Bucket - bucket_name = "ml-storage" - - // TensorBoard - tensorboard_name = "ml-tensorboard-${local.env}" -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/non-production/outputs.tf b/5-app-infra/projects/machine-learning/business_unit_3/non-production/outputs.tf deleted file mode 100644 index cd5d4fbb..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/non-production/outputs.tf +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -// Composer - -output "composer" { - value = try(module.base_env.composer, "") - description = "Cloud Composer Environment." -} - -######################## -# Metadata # -######################## - -output "metadata" { - description = "an identifier for the resource with format {{name}}" - value = try(module.base_env.metadata, "") -} - -######################## -# Bucket # -######################## - -output "bucket" { - description = "Generated unique name fort the bucket" - value = try(module.base_env.bucket, "") -} - -######################## -# TensorBoard # -######################## - -output "tensorboard" { - description = "TensorBoard resource." - value = try(module.base_env.tensorboard, "") -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/non-production/remote.tf b/5-app-infra/projects/machine-learning/business_unit_3/non-production/remote.tf deleted file mode 100644 index dc533538..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/non-production/remote.tf +++ /dev/null @@ -1,40 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -locals { - machine_learning_project_id = data.terraform_remote_state.projects_env.outputs.machine_learning_project_id - machine_learning_kms_keys = data.terraform_remote_state.projects_env.outputs.machine_learning_kms_keys - service_catalog_repo_name = data.terraform_remote_state.projects_shared.outputs.service_catalog_repo_name - service_catalog_project_id = data.terraform_remote_state.projects_shared.outputs.service_catalog_project_id -} - -data "terraform_remote_state" "projects_env" { - backend = "gcs" - - config = { - bucket = var.remote_state_bucket - prefix = "terraform/projects/${local.business_unit}/${local.env}" - } -} - -data "terraform_remote_state" "projects_shared" { - backend = "gcs" - - config = { - bucket = var.remote_state_bucket - prefix = "terraform/projects/${local.business_unit}/shared" - } -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/non-production/variables.tf b/5-app-infra/projects/machine-learning/business_unit_3/non-production/variables.tf deleted file mode 100644 index c26d31ca..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/non-production/variables.tf +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -variable "instance_region" { - description = "The region where notebook instance will be created. A subnetwork must exists in the instance region." 
- type = string -} - -variable "remote_state_bucket" { - description = "Backend bucket to load remote state information from previous steps." - type = string -} - -variable "github_app_installation_id" { - description = "The app installation ID that was created when installing Google Cloud Build in Github: https://github.com/apps/google-cloud-build" - type = number - -} -variable "github_remote_uri" { - description = "The remote uri of your github repository" - type = string -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/non-production/versions.tf b/5-app-infra/projects/machine-learning/business_unit_3/non-production/versions.tf deleted file mode 100644 index baa38abb..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/non-production/versions.tf +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Copyright 2022 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -terraform { - required_version = ">= 0.13" - - required_providers { - - google = { - source = "hashicorp/google" - version = ">= 3.77, < 6" - } - - google-beta = { - source = "hashicorp/google-beta" - version = ">= 3.77, < 6" - } - - null = { - source = "hashicorp/null" - version = "~> 3.0" - } - - random = { - source = "hashicorp/random" - version = "~> 3.1" - } - - } -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/production/README.md b/5-app-infra/projects/machine-learning/business_unit_3/production/README.md deleted file mode 100644 index 3255fd1a..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/production/README.md +++ /dev/null @@ -1,20 +0,0 @@ - -## Inputs - -| Name | Description | Type | Default | Required | -|------|-------------|------|---------|:--------:| -| github\_app\_installation\_id | The app installation ID that was created when installing Google Cloud Build in Github: https://github.com/apps/google-cloud-build | `number` | n/a | yes | -| github\_remote\_uri | The remote uri of your github repository | `string` | n/a | yes | -| instance\_region | The region where notebook instance will be created. A subnetwork must exists in the instance region. | `string` | n/a | yes | -| remote\_state\_bucket | Backend bucket to load remote state information from previous steps. | `string` | n/a | yes | - -## Outputs - -| Name | Description | -|------|-------------| -| bucket | Generated unique name fort the bucket | -| composer | Cloud Composer Environment. | -| metadata | an identifier for the resource with format {{name}} | -| tensorboard | TensorBoard resource. 
| - - diff --git a/5-app-infra/projects/machine-learning/business_unit_3/production/backend.tf b/5-app-infra/projects/machine-learning/business_unit_3/production/backend.tf deleted file mode 100644 index 9a280f76..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/production/backend.tf +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -terraform { - backend "gcs" { - bucket = "UPDATE_APP_INFRA_BUCKET" - prefix = "terraform/app-infra/business_unit_3/production" - } -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/production/common.auto.tfvars b/5-app-infra/projects/machine-learning/business_unit_3/production/common.auto.tfvars deleted file mode 120000 index 39aaa462..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/production/common.auto.tfvars +++ /dev/null @@ -1 +0,0 @@ -../../common.auto.tfvars \ No newline at end of file diff --git a/5-app-infra/projects/machine-learning/business_unit_3/production/locals.tf b/5-app-infra/projects/machine-learning/business_unit_3/production/locals.tf deleted file mode 100644 index 5a1cd7c8..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/production/locals.tf +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -locals { - business_unit = "business_unit_3" - business_code = "bu3" - env = "production" - environment_code = "p" -} - diff --git a/5-app-infra/projects/machine-learning/business_unit_3/production/main.tf b/5-app-infra/projects/machine-learning/business_unit_3/production/main.tf deleted file mode 100644 index 3f847f62..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/production/main.tf +++ /dev/null @@ -1,57 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - - -module "base_env" { - source = "../../modules/base_env" - - env = local.env - environment_code = local.environment_code - business_code = local.business_code - project_id = local.machine_learning_project_id - - kms_keys = local.machine_learning_kms_keys - - // Composer - composer_name = "composer" - composer_airflow_config_overrides = { - core-dags_are_paused_at_creation = "true" - } - - composer_github_app_installation_id = var.github_app_installation_id - composer_github_remote_uri = var.github_remote_uri - - composer_pypi_packages = { - tensorflow = "" - google-cloud-bigquery = "" - db-dtypes = "" - google-cloud-aiplatform = "" - google-cloud-storage = "" - # tensorflow-io = "" - } - - // BigQuery - big_query_dataset_id = "census_dataset" - - // Metadata - metadata_name = "metadata-store-${local.env}" - - // Bucket - bucket_name = "ml-storage" - - // TensorBoard - tensorboard_name = "ml-tensorboard-${local.env}" -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/production/outputs.tf b/5-app-infra/projects/machine-learning/business_unit_3/production/outputs.tf deleted file mode 100644 index cd5d4fbb..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/production/outputs.tf +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -// Composer - -output "composer" { - value = try(module.base_env.composer, "") - description = "Cloud Composer Environment." -} - -######################## -# Metadata # -######################## - -output "metadata" { - description = "an identifier for the resource with format {{name}}" - value = try(module.base_env.metadata, "") -} - -######################## -# Bucket # -######################## - -output "bucket" { - description = "Generated unique name fort the bucket" - value = try(module.base_env.bucket, "") -} - -######################## -# TensorBoard # -######################## - -output "tensorboard" { - description = "TensorBoard resource." - value = try(module.base_env.tensorboard, "") -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/production/remote.tf b/5-app-infra/projects/machine-learning/business_unit_3/production/remote.tf deleted file mode 100644 index dc533538..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/production/remote.tf +++ /dev/null @@ -1,40 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -locals { - machine_learning_project_id = data.terraform_remote_state.projects_env.outputs.machine_learning_project_id - machine_learning_kms_keys = data.terraform_remote_state.projects_env.outputs.machine_learning_kms_keys - service_catalog_repo_name = data.terraform_remote_state.projects_shared.outputs.service_catalog_repo_name - service_catalog_project_id = data.terraform_remote_state.projects_shared.outputs.service_catalog_project_id -} - -data "terraform_remote_state" "projects_env" { - backend = "gcs" - - config = { - bucket = var.remote_state_bucket - prefix = "terraform/projects/${local.business_unit}/${local.env}" - } -} - -data "terraform_remote_state" "projects_shared" { - backend = "gcs" - - config = { - bucket = var.remote_state_bucket - prefix = "terraform/projects/${local.business_unit}/shared" - } -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/production/variables.tf b/5-app-infra/projects/machine-learning/business_unit_3/production/variables.tf deleted file mode 100644 index c26d31ca..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/production/variables.tf +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -variable "instance_region" { - description = "The region where notebook instance will be created. A subnetwork must exists in the instance region." 
- type = string -} - -variable "remote_state_bucket" { - description = "Backend bucket to load remote state information from previous steps." - type = string -} - -variable "github_app_installation_id" { - description = "The app installation ID that was created when installing Google Cloud Build in Github: https://github.com/apps/google-cloud-build" - type = number - -} -variable "github_remote_uri" { - description = "The remote uri of your github repository" - type = string -} diff --git a/5-app-infra/projects/machine-learning/business_unit_3/production/versions.tf b/5-app-infra/projects/machine-learning/business_unit_3/production/versions.tf deleted file mode 100644 index baa38abb..00000000 --- a/5-app-infra/projects/machine-learning/business_unit_3/production/versions.tf +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Copyright 2022 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -terraform { - required_version = ">= 0.13" - - required_providers { - - google = { - source = "hashicorp/google" - version = ">= 3.77, < 6" - } - - google-beta = { - source = "hashicorp/google-beta" - version = ">= 3.77, < 6" - } - - null = { - source = "hashicorp/null" - version = "~> 3.0" - } - - random = { - source = "hashicorp/random" - version = "~> 3.1" - } - - } -} diff --git a/5-app-infra/projects/machine-learning/common.auto.example.tfvars b/5-app-infra/projects/machine-learning/common.auto.example.tfvars deleted file mode 100644 index 8c9be120..00000000 --- a/5-app-infra/projects/machine-learning/common.auto.example.tfvars +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -instance_region = "us-central1" // should be one of the regions used to create network on step 3-networks - -remote_state_bucket = "REMOTE_STATE_BUCKET" - -github_app_installation_id = "REPLACE_ME" - -github_remote_uri = "REPLACE_ME" diff --git a/5-app-infra/projects/machine-learning/modules/base_env/data.tf b/5-app-infra/projects/machine-learning/modules/base_env/data.tf deleted file mode 100644 index 323f4ba5..00000000 --- a/5-app-infra/projects/machine-learning/modules/base_env/data.tf +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -data "google_project" "project" { - project_id = var.project_id -} - -data "google_projects" "non-production" { - filter = "labels.application_name:machine-learning labels.environment:non-production" -} - -data "google_projects" "production" { - filter = "labels.application_name:machine-learning labels.environment:production" -} - -data "google_service_account" "non-production" { - project = data.google_projects.non-production.projects.0.project_id - account_id = "${data.google_projects.non-production.projects.0.number}-compute@developer.gserviceaccount.com" -} - -data "google_service_account" "production" { - project = data.google_projects.production.projects.0.project_id - account_id = "${data.google_projects.production.projects.0.number}-compute@developer.gserviceaccount.com" -} diff --git a/5-app-infra/projects/machine-learning/modules/base_env/iam.tf b/5-app-infra/projects/machine-learning/modules/base_env/iam.tf deleted file mode 100644 index 7cd28370..00000000 --- a/5-app-infra/projects/machine-learning/modules/base_env/iam.tf +++ /dev/null @@ -1,169 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -locals { - composer_roles = [ - "roles/composer.worker", - "projects/${var.project_id}/roles/composerServiceAccountGCS", // Cloud Storage - "projects/${var.project_id}/roles/composerServiceAccountBQ", // BigQuery - "projects/${var.project_id}/roles/composerServiceAccountBQ", // Vertex AI - ] - - cloudbuild_roles = [ - "roles/aiplatform.admin", - "roles/artifactregistry.admin", - "roles/bigquery.admin", - "roles/cloudbuild.connectionAdmin", - "roles/composer.admin", - "roles/compute.admin", - "roles/compute.instanceAdmin.v1", - "roles/compute.networkAdmin", - "roles/iam.roleAdmin", - "roles/iam.serviceAccountAdmin", - "roles/iam.serviceAccountUser", - "roles/notebooks.admin", - "roles/pubsub.admin", - "roles/resourcemanager.projectIamAdmin", - "roles/secretmanager.admin", - "roles/serviceusage.serviceUsageConsumer", - "roles/storage.admin", - ] - - service_agent_apis = [ - "aiplatform.googleapis.com", // service-PROJECT_NUMBER@gcp-sa-aiplatform.iam.gserviceaccount.com - "artifactregistry.googleapis.com", // service-PROJECT_NUMBER@gcp-sa-artifactregistry.iam.gserviceaccount.com - "bigquery.googleapis.com", // bq-PROJECT_NUMBER@bigquery-encryption.iam.gserviceaccount.com - "cloudkms.googleapis.com", // service-PROJECT_NUMBER@gcp-sa-cloudkms.iam.gserviceaccount.com - "composer.googleapis.com", // service-PROJECT_NUMBER@cloudcomposer-accounts.iam.gserviceaccount.com - "compute.googleapis.com", // service-PROJECT_NUMBER@compute-system.iam.gserviceaccount.com - "container.googleapis.com", // 
service-PROJECT_NUMBER@container-engine-robot.iam.gserviceaccount.com - "containerregistry.googleapis.com", // service-PROJECT_NUMBER@containerregistry.iam.gserviceaccount.com - "dataflow.googleapis.com", // service-PROJECT_NUMBER@dataflow-service-producer-prod.iam.gserviceaccount.com - "dataform.googleapis.com", // service-PROJECT_NUMBER@gcp-sa-dataform.iam.gserviceaccount.com - "notebooks.googleapis.com", // service-PROJECT_NUMBER@gcp-sa-notebooks.iam.gserviceaccount.com - "pubsub.googleapis.com", // service-PROJECT_NUMBER@gcp-sa-pubsub.iam.gserviceaccount.com - "secretmanager.googleapis.com", // service-PROJECT_NUMBER@gcp-sa-secretmanager.iam.gserviceaccount.com - "storage.googleapis.com", // service-PROJECT_NUMBER@gs-project-accounts.iam.gserviceaccount.com - ] - - service_agents = [ - "service-${data.google_project.project.number}@gcp-sa-aiplatform.iam.gserviceaccount.com", // aiplatform.googleapis.com - "service-${data.google_project.project.number}@gcp-sa-artifactregistry.iam.gserviceaccount.com", // artifactregistry.googleapis.com - "bq-${data.google_project.project.number}@bigquery-encryption.iam.gserviceaccount.com", // bigquery.googleapis.com - "service-${data.google_project.project.number}@gcp-sa-cloudkms.iam.gserviceaccount.com", // cloudkms.googleapis.com - "service-${data.google_project.project.number}@cloudcomposer-accounts.iam.gserviceaccount.com", // composer.googleapis.com - "service-${data.google_project.project.number}@compute-system.iam.gserviceaccount.com", // compute.googleapis.com - "service-${data.google_project.project.number}@container-engine-robot.iam.gserviceaccount.com", // container.googleapis.com - "service-${data.google_project.project.number}@containerregistry.iam.gserviceaccount.com", // containerregistry.googleapis.com - "service-${data.google_project.project.number}@dataflow-service-producer-prod.iam.gserviceaccount.com", // dataflow.googleapis.com - 
"service-${data.google_project.project.number}@gcp-sa-dataform.iam.gserviceaccount.com", // dataform.googleapis.com - "service-${data.google_project.project.number}@gcp-sa-notebooks.iam.gserviceaccount.com", // notebooks.googleapis.com - "service-${data.google_project.project.number}@gcp-sa-pubsub.iam.gserviceaccount.com", // pubsub.googleapis.com - "service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com", // secretmanager.googleapis.com - "service-${data.google_project.project.number}@gs-project-accounts.iam.gserviceaccount.com", // storage.googleapis.com - ] - - service_agent_key_binding = flatten([ - for r, k in var.kms_keys : [ - for sa in local.service_agents : { region = r, email = sa, key = k } - ] - ]) -} - -################################ -### Composer Service Account ### -################################ -resource "google_service_account" "composer" { - account_id = format("%s-%s-%s", var.service_account_prefix, var.environment_code, "composer") - display_name = "${title(var.env)} Composer Service Account" - description = "Service account to be used by Cloud Composer" - project = var.project_id -} - -resource "google_project_iam_member" "composer_project_iam" { - for_each = toset(local.composer_roles) - - project = var.project_id - role = each.key - member = "serviceAccount:${google_service_account.composer.email}" -} - -resource "google_kms_crypto_key_iam_member" "composer_kms_key_binding" { - for_each = var.kms_keys - crypto_key_id = each.value.id - role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" - member = "serviceAccount:${google_service_account.composer.email}" -} - -resource "google_service_account_iam_member" "composer_service_agent" { - provider = google-beta - service_account_id = google_service_account.composer.id - role = "roles/composer.ServiceAgentV2Ext" - member = "serviceAccount:service-${data.google_project.project.number}@cloudcomposer-accounts.iam.gserviceaccount.com" -} - -resource 
"google_service_account_iam_member" "compute_non_production" { - count = var.env == "non-production" ? 1 : 0 - provider = google-beta - service_account_id = data.google_service_account.non-production.id - role = "roles/iam.serviceAccountUser" - member = "serviceAccount:${data.google_projects.production.projects.0.number}-compute@developer.gserviceaccount.com" -} - -resource "google_service_account_iam_member" "compute_production" { - count = var.env == "production" ? 1 : 0 - provider = google-beta - service_account_id = data.google_service_account.production.id - role = "roles/iam.serviceAccountUser" - member = "serviceAccount:${data.google_projects.non-production.projects.0.number}-compute@developer.gserviceaccount.com" -} - -###################### -### Service Agents ### -###################### -resource "google_project_service_identity" "service_agent" { - provider = google-beta - for_each = toset(local.service_agent_apis) - - project = var.project_id - service = each.value -} - -resource "time_sleep" "wait_30_seconds" { - create_duration = "30s" - - depends_on = [google_project_service_identity.service_agent] -} - -resource "google_kms_crypto_key_iam_member" "service_agent_kms_key_binding" { - for_each = { for k in local.service_agent_key_binding : "${k.email}-${k.region}" => k } - - crypto_key_id = each.value.key.id - role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" - member = "serviceAccount:${each.value.email}" - - depends_on = [time_sleep.wait_30_seconds] -} - -######################## -# Service Catalog # -######################## -resource "google_project_iam_member" "cloud_build" { - for_each = { for k, v in toset(local.cloudbuild_roles) : k => v if var.env == "development" } - project = var.project_id - role = each.key - member = "serviceAccount:${data.google_project.project.number}@cloudbuild.gserviceaccount.com" -} diff --git a/5-app-infra/projects/machine-learning/modules/base_env/main.tf 
b/5-app-infra/projects/machine-learning/modules/base_env/main.tf deleted file mode 100644 index 791fdfbe..00000000 --- a/5-app-infra/projects/machine-learning/modules/base_env/main.tf +++ /dev/null @@ -1,121 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -######################## -# Composer # -######################## - -module "composer" { - count = var.env != "development" ? 1 : 0 - source = "git::https://source.developers.google.com/p/SERVICE-CATALOG-PROJECT-ID/r/service-catalog//modules/composer?ref=main" - - project_id = var.project_id - name = var.composer_name - airflow_config_overrides = var.composer_airflow_config_overrides - github_remote_uri = var.composer_github_remote_uri - github_app_installation_id = var.composer_github_app_installation_id - - region = var.region - labels = var.composer_labels - maintenance_window = var.composer_maintenance_window - env_variables = var.composer_env_variables - image_version = var.composer_image_version - pypi_packages = var.composer_pypi_packages - python_version = var.composer_python_version - web_server_allowed_ip_ranges = var.composer_web_server_allowed_ip_ranges - - depends_on = [google_service_account.composer, google_kms_crypto_key_iam_member.service_agent_kms_key_binding] -} - -######################## -# Big Query # -######################## - -module "big_query" { - count = var.env != "development" ? 
1 : 0 - source = "git::https://source.developers.google.com/p/SERVICE-CATALOG-PROJECT-ID/r/service-catalog//modules/bigquery?ref=main" - - project_id = var.project_id - dataset_id = var.big_query_dataset_id - - region = var.region - friendly_name = var.big_query_friendly_name - description = var.big_query_description - default_partition_expiration_ms = var.big_query_default_partition_expiration_ms - default_table_expiration_ms = var.big_query_default_table_expiration_ms - delete_contents_on_destroy = var.big_query_delete_contents_on_destroy - - depends_on = [google_kms_crypto_key_iam_member.service_agent_kms_key_binding] -} - -######################## -# Metadata # -######################## - -module "metadata" { - count = var.env != "development" ? 1 : 0 - source = "git::https://source.developers.google.com/p/SERVICE-CATALOG-PROJECT-ID/r/service-catalog//modules/metadata?ref=main" - - project_id = var.project_id - name = var.metadata_name - - region = var.region - - depends_on = [google_kms_crypto_key_iam_member.service_agent_kms_key_binding] -} - -######################## -# Bucket # -######################## - -module "bucket" { - count = var.env != "development" ? 
1 : 0 - source = "git::https://source.developers.google.com/p/SERVICE-CATALOG-PROJECT-ID/r/service-catalog//modules/bucket?ref=main" - - project_id = var.project_id - name = var.bucket_name - - region = var.region - dual_region_locations = var.bucket_dual_region_locations - force_destroy = var.bucket_force_destroy - lifecycle_rules = var.bucket_lifecycle_rules - retention_policy = var.bucket_retention_policy - object_folder_temporary_hold = var.bucket_object_folder_temporary_hold - labels = var.bucket_labels - add_random_suffix = var.bucket_add_random_suffix - uniform_bucket_level_access = var.bucket_uniform_bucket_level_access - storage_class = var.bucket_storage_class - requester_pays = var.bucket_requester_pays - gcs_bucket_prefix = var.gcs_bucket_prefix - - depends_on = [google_kms_crypto_key_iam_member.service_agent_kms_key_binding] -} - -######################## -# TensorBoard # -######################## - -module "tensorboard" { - count = var.env != "development" ? 1 : 0 - source = "git::https://source.developers.google.com/p/SERVICE-CATALOG-PROJECT-ID/r/service-catalog//modules/tensorboard?ref=main" - - project_id = var.project_id - name = var.tensorboard_name - - region = var.region - - depends_on = [google_kms_crypto_key_iam_member.service_agent_kms_key_binding] -} diff --git a/5-app-infra/projects/machine-learning/modules/base_env/outputs.tf b/5-app-infra/projects/machine-learning/modules/base_env/outputs.tf deleted file mode 100644 index cc8800ca..00000000 --- a/5-app-infra/projects/machine-learning/modules/base_env/outputs.tf +++ /dev/null @@ -1,50 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -######################## -# Composer # -######################## - -output "composer" { - value = try(module.composer, "") - description = "Cloud Composer Environment." -} - -######################## -# Metadata # -######################## - -output "metadata" { - description = "an identifier for the resource with format {{name}}" - value = try(module.metadata, "") -} - -######################## -# Bucket # -######################## -output "bucket" { - description = "Generated unique name fort the bucket" - value = try(module.bucket, "") -} - -######################## -# TensorBoard # -######################## - -output "tensorboard" { - description = "TensorBoard object." - value = try(module.tensorboard, "") -} diff --git a/5-app-infra/projects/machine-learning/modules/base_env/roles.tf b/5-app-infra/projects/machine-learning/modules/base_env/roles.tf deleted file mode 100644 index 335683b6..00000000 --- a/5-app-infra/projects/machine-learning/modules/base_env/roles.tf +++ /dev/null @@ -1,293 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -resource "google_project_iam_custom_role" "composer-sa-gcs" { - project = var.project_id - role_id = "composerServiceAccountGCS" - title = "Composer Service Account Cloud Storage" - description = "Provides access to cloud storage for Cloud Composer service accounts" - permissions = [ - # Storage Object Admin - "resourcemanager.projects.get", - "storage.objects.create", - "storage.objects.delete", - "storage.objects.get", - "storage.objects.getIamPolicy", - "storage.objects.list", - "storage.objects.update", - ] -} - -resource "google_project_iam_custom_role" "composer-sa-bq" { - project = var.project_id - role_id = "composerServiceAccountBQ" - title = "Composer Service Account BigQuery" - description = "Provides access to BigQuery for Cloud Composer service accounts" - permissions = [ - # BigQuery Data Editor - "bigquery.datasets.create", - "bigquery.datasets.get", - "bigquery.datasets.getIamPolicy", - "bigquery.datasets.updateTag", - "bigquery.models.create", - "bigquery.models.delete", - "bigquery.models.export", - "bigquery.models.getData", - "bigquery.models.getMetadata", - "bigquery.models.list", - "bigquery.models.updateData", - "bigquery.models.updateMetadata", - "bigquery.models.updateTag", - "bigquery.routines.create", - "bigquery.routines.delete", - "bigquery.routines.get", - "bigquery.routines.list", - "bigquery.routines.update", - "bigquery.routines.updateTag", - "bigquery.tables.create", - "bigquery.tables.createSnapshot", - "bigquery.tables.delete", - "bigquery.tables.export", - "bigquery.tables.get", - "bigquery.tables.getData", - "bigquery.tables.getIamPolicy", - "bigquery.tables.list", - "bigquery.tables.restoreSnapshot", - "bigquery.tables.update", - "bigquery.tables.updateData", - "bigquery.tables.updateTag", - "resourcemanager.projects.get", - "bigquery.jobs.create" - ] -} - -resource "google_project_iam_custom_role" 
"composer-sa-vertex" { - project = var.project_id - role_id = "composerServiceAccountVertex" - title = "Composer Service Account Vertex AI" - description = "Provides access to Vertex for Cloud Composer service accounts" - permissions = [ - # Vertex AI User - "aiplatform.annotations.create", - "aiplatform.annotations.delete", - "aiplatform.annotations.get", - "aiplatform.annotations.list", - "aiplatform.annotations.update", - "aiplatform.annotationSpecs.create", - "aiplatform.annotationSpecs.delete", - "aiplatform.annotationSpecs.get", - "aiplatform.annotationSpecs.list", - "aiplatform.annotationSpecs.update", - "aiplatform.artifacts.create", - "aiplatform.artifacts.delete", - "aiplatform.artifacts.get", - "aiplatform.artifacts.list", - "aiplatform.artifacts.update", - "aiplatform.batchPredictionJobs.cancel", - "aiplatform.batchPredictionJobs.create", - "aiplatform.batchPredictionJobs.delete", - "aiplatform.batchPredictionJobs.get", - "aiplatform.batchPredictionJobs.list", - "aiplatform.contexts.addContextArtifactsAndExecutions", - "aiplatform.contexts.addContextChildren", - "aiplatform.contexts.create", - "aiplatform.contexts.delete", - "aiplatform.contexts.get", - "aiplatform.contexts.list", - "aiplatform.contexts.queryContextLineageSubgraph", - "aiplatform.contexts.update", - "aiplatform.customJobs.cancel", - "aiplatform.customJobs.create", - "aiplatform.customJobs.delete", - "aiplatform.customJobs.get", - "aiplatform.customJobs.list", - "aiplatform.dataItems.create", - "aiplatform.dataItems.delete", - "aiplatform.dataItems.get", - "aiplatform.dataItems.list", - "aiplatform.dataItems.update", - "aiplatform.dataLabelingJobs.cancel", - "aiplatform.dataLabelingJobs.create", - "aiplatform.dataLabelingJobs.delete", - "aiplatform.dataLabelingJobs.get", - "aiplatform.dataLabelingJobs.list", - "aiplatform.datasets.create", - "aiplatform.datasets.delete", - "aiplatform.datasets.export", - "aiplatform.datasets.get", - "aiplatform.datasets.import", - 
"aiplatform.datasets.list", - "aiplatform.datasets.update", - "aiplatform.edgeDeploymentJobs.create", - "aiplatform.edgeDeploymentJobs.delete", - "aiplatform.edgeDeploymentJobs.get", - "aiplatform.edgeDeploymentJobs.list", - "aiplatform.edgeDeviceDebugInfo.get", - "aiplatform.edgeDevices.create", - "aiplatform.edgeDevices.delete", - "aiplatform.edgeDevices.get", - "aiplatform.edgeDevices.list", - "aiplatform.edgeDevices.update", - "aiplatform.endpoints.create", - "aiplatform.endpoints.delete", - "aiplatform.endpoints.deploy", - "aiplatform.endpoints.explain", - "aiplatform.endpoints.get", - "aiplatform.endpoints.list", - "aiplatform.endpoints.predict", - "aiplatform.endpoints.undeploy", - "aiplatform.endpoints.update", - "aiplatform.entityTypes.create", - "aiplatform.entityTypes.delete", - "aiplatform.entityTypes.exportFeatureValues", - "aiplatform.entityTypes.get", - "aiplatform.entityTypes.importFeatureValues", - "aiplatform.entityTypes.list", - "aiplatform.entityTypes.readFeatureValues", - "aiplatform.entityTypes.streamingReadFeatureValues", - "aiplatform.entityTypes.update", - "aiplatform.entityTypes.writeFeatureValues", - "aiplatform.executions.addExecutionEvents", - "aiplatform.executions.create", - "aiplatform.executions.delete", - "aiplatform.executions.get", - "aiplatform.executions.list", - "aiplatform.executions.queryExecutionInputsAndOutputs", - "aiplatform.executions.update", - "aiplatform.features.create", - "aiplatform.features.delete", - "aiplatform.features.get", - "aiplatform.features.list", - "aiplatform.features.update", - "aiplatform.featurestores.batchReadFeatureValues", - "aiplatform.featurestores.create", - "aiplatform.featurestores.delete", - "aiplatform.featurestores.exportFeatures", - "aiplatform.featurestores.get", - "aiplatform.featurestores.importFeatures", - "aiplatform.featurestores.list", - "aiplatform.featurestores.readFeatures", - "aiplatform.featurestores.update", - "aiplatform.featurestores.writeFeatures", - 
"aiplatform.humanInTheLoops.create", - "aiplatform.humanInTheLoops.delete", - "aiplatform.humanInTheLoops.get", - "aiplatform.humanInTheLoops.list", - "aiplatform.humanInTheLoops.send", - "aiplatform.humanInTheLoops.update", - "aiplatform.hyperparameterTuningJobs.cancel", - "aiplatform.hyperparameterTuningJobs.create", - "aiplatform.hyperparameterTuningJobs.delete", - "aiplatform.hyperparameterTuningJobs.get", - "aiplatform.hyperparameterTuningJobs.list", - "aiplatform.indexEndpoints.create", - "aiplatform.indexEndpoints.delete", - "aiplatform.indexEndpoints.deploy", - "aiplatform.indexEndpoints.get", - "aiplatform.indexEndpoints.list", - "aiplatform.indexEndpoints.undeploy", - "aiplatform.indexEndpoints.update", - "aiplatform.indexes.create", - "aiplatform.indexes.delete", - "aiplatform.indexes.get", - "aiplatform.indexes.list", - "aiplatform.indexes.update", - "aiplatform.locations.get", - "aiplatform.locations.list", - "aiplatform.metadataSchemas.create", - "aiplatform.metadataSchemas.delete", - "aiplatform.metadataSchemas.get", - "aiplatform.metadataSchemas.list", - "aiplatform.metadataStores.create", - "aiplatform.metadataStores.delete", - "aiplatform.metadataStores.get", - "aiplatform.metadataStores.list", - "aiplatform.modelDeploymentMonitoringJobs.create", - "aiplatform.modelDeploymentMonitoringJobs.delete", - "aiplatform.modelDeploymentMonitoringJobs.get", - "aiplatform.modelDeploymentMonitoringJobs.list", - "aiplatform.modelDeploymentMonitoringJobs.pause", - "aiplatform.modelDeploymentMonitoringJobs.resume", - "aiplatform.modelDeploymentMonitoringJobs.searchStatsAnomalies", - "aiplatform.modelDeploymentMonitoringJobs.update", - "aiplatform.modelEvaluations.exportEvaluatedDataItems", - "aiplatform.modelEvaluations.get", - "aiplatform.modelEvaluations.list", - "aiplatform.modelEvaluationSlices.get", - "aiplatform.modelEvaluationSlices.list", - "aiplatform.models.delete", - "aiplatform.models.export", - "aiplatform.models.get", - "aiplatform.models.list", - 
"aiplatform.models.update", - "aiplatform.models.upload", - "aiplatform.nasJobs.cancel", - "aiplatform.nasJobs.create", - "aiplatform.nasJobs.delete", - "aiplatform.nasJobs.get", - "aiplatform.nasJobs.list", - "aiplatform.operations.list", - "aiplatform.pipelineJobs.cancel", - "aiplatform.pipelineJobs.create", - "aiplatform.pipelineJobs.delete", - "aiplatform.pipelineJobs.get", - "aiplatform.pipelineJobs.list", - "aiplatform.specialistPools.create", - "aiplatform.specialistPools.delete", - "aiplatform.specialistPools.get", - "aiplatform.specialistPools.list", - "aiplatform.specialistPools.update", - "aiplatform.studies.create", - "aiplatform.studies.delete", - "aiplatform.studies.get", - "aiplatform.studies.list", - "aiplatform.studies.update", - "aiplatform.tensorboardExperiments.create", - "aiplatform.tensorboardExperiments.delete", - "aiplatform.tensorboardExperiments.get", - "aiplatform.tensorboardExperiments.list", - "aiplatform.tensorboardExperiments.update", - "aiplatform.tensorboardExperiments.write", - "aiplatform.tensorboardRuns.create", - "aiplatform.tensorboardRuns.delete", - "aiplatform.tensorboardRuns.get", - "aiplatform.tensorboardRuns.list", - "aiplatform.tensorboardRuns.update", - "aiplatform.tensorboardRuns.write", - "aiplatform.tensorboards.create", - "aiplatform.tensorboards.delete", - "aiplatform.tensorboards.get", - "aiplatform.tensorboards.list", - "aiplatform.tensorboards.update", - "aiplatform.tensorboardTimeSeries.create", - "aiplatform.tensorboardTimeSeries.delete", - "aiplatform.tensorboardTimeSeries.get", - "aiplatform.tensorboardTimeSeries.list", - "aiplatform.tensorboardTimeSeries.read", - "aiplatform.tensorboardTimeSeries.update", - "aiplatform.trainingPipelines.cancel", - "aiplatform.trainingPipelines.create", - "aiplatform.trainingPipelines.delete", - "aiplatform.trainingPipelines.get", - "aiplatform.trainingPipelines.list", - "aiplatform.trials.create", - "aiplatform.trials.delete", - "aiplatform.trials.get", - 
"aiplatform.trials.list", - "aiplatform.trials.update", - "resourcemanager.projects.get", - ] -} diff --git a/5-app-infra/projects/machine-learning/modules/base_env/variables.tf b/5-app-infra/projects/machine-learning/modules/base_env/variables.tf deleted file mode 100644 index fe351a4c..00000000 --- a/5-app-infra/projects/machine-learning/modules/base_env/variables.tf +++ /dev/null @@ -1,380 +0,0 @@ -/** - * Copyright 2024 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -######################## -# Global Inputs # -######################## - -variable "env" { - description = "Environment name. (ex. production)" - type = string -} - -variable "environment_code" { - description = "Environment code. (ex. p for production)" - type = string -} - -variable "business_code" { - description = "Business unit code (ie. bu3)" - type = string - -} - -variable "project_id" { - description = "Environments Machine Learning Project ID" - type = string -} - -variable "region" { - type = string - description = "The resource region, one of [us-central1, us-east4]." - default = "us-central1" - validation { - condition = contains(["us-central1", "us-east4"], var.region) - error_message = "Region must be one of [us-central1, us-east4]." - } -} - -variable "kms_keys" { - description = "Project's KMS Crypto keys." 
- type = map(any) -} - -variable "gcs_bucket_prefix" { - description = "Name prefix to be used for GCS Bucket" - type = string - default = "bkt" -} - -variable "service_account_prefix" { - description = "Name prefix to use for service accounts." - type = string - default = "sa" -} - -######################## -# Composer # -######################## - -variable "composer_name" { - description = "Name of Composer environment" - type = string - default = null -} - -variable "composer_github_remote_uri" { - description = "Url of your github repo" - type = string - default = null -} - -variable "composer_github_app_installation_id" { - description = "The app installation ID that was created when installing Google Cloud Build in Github: https://github.com/apps/google-cloud-build" - type = number - default = null -} - -variable "composer_labels" { - type = map(string) - description = "The resource labels (a map of key/value pairs) to be applied to the Cloud Composer." - default = {} -} - -variable "composer_maintenance_window" { - type = object({ - start_time = string - end_time = string - recurrence = string - }) - - description = "The configuration settings for Cloud Composer maintenance window." - - # Set Start time, Timezone, Days, and Length, so that combined time for the - # specified schedule is at least 12 hours in a 7-day rolling window. For example, - # a period of 4 hours every Monday, Wednesday, and Friday provides the required amount of time. - - # 12-hour maintenance window between 01:00 and 13:00 (UTC) on Sundays - default = { - start_time = "2021-01-01T01:00:00Z" - end_time = "2021-01-01T13:00:00Z" - recurrence = "FREQ=WEEKLY;BYDAY=SU" - } -} - -variable "composer_airflow_config_overrides" { - type = map(string) - description = "Airflow configuration properties to override. Property keys contain the section and property names, separated by a hyphen, for example \"core-dags_are_paused_at_creation\"." 
- default = {} -} - -variable "composer_env_variables" { - type = map(any) - description = "Additional environment variables to provide to the Apache Airflow scheduler, worker, and webserver processes. Environment variable names must match the regular expression [a-zA-Z_][a-zA-Z0-9_]*. They cannot specify Apache Airflow software configuration overrides (they cannot match the regular expression AIRFLOW__[A-Z0-9_]+__[A-Z0-9_]+), and they cannot match any of the following reserved names: [AIRFLOW_HOME,C_FORCE_ROOT,CONTAINER_NAME,DAGS_FOLDER,GCP_PROJECT,GCS_BUCKET,GKE_CLUSTER_NAME,SQL_DATABASE,SQL_INSTANCE,SQL_PASSWORD,SQL_PROJECT,SQL_REGION,SQL_USER]" - default = {} -} - -variable "composer_image_version" { - type = string - description = "The version of the aiflow running in the cloud composer environment." - default = "composer-2.5.2-airflow-2.6.3" - validation { - condition = can(regex("^composer-([2-9]|[1-9][0-9]+)\\..*$", var.composer_image_version)) - error_message = "The airflow_image_version must be GCP Composer version 2 or higher (e.g., composer-2.x.x-airflow-x.x.x)." - } -} - -variable "composer_pypi_packages" { - type = map(string) - description = " Custom Python Package Index (PyPI) packages to be installed in the environment. Keys refer to the lowercase package name (e.g. \"numpy\")." - default = {} -} - -variable "composer_python_version" { - description = "The default version of Python used to run the Airflow scheduler, worker, and webserver processes." - type = string - default = "3" -} - -variable "composer_web_server_allowed_ip_ranges" { - description = "The network-level access control policy for the Airflow web server. If unspecified, no network-level access restrictions will be applied." 
- default = null - type = list(object({ - value = string - description = string - })) -} - -variable "composer_github_secret_name" { - description = "Name of the github secret to extract github token info" - type = string - default = "github-api-token" -} - -######################## -# Big Query # -######################## - -variable "big_query_dataset_id" { - description = "A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters." - type = string - default = null -} - -variable "big_query_friendly_name" { - description = "A descriptive name for the dataset" - type = string - default = "" -} - -variable "big_query_description" { - description = "A user-friendly description of the dataset" - type = string - default = "" -} - -variable "big_query_default_partition_expiration_ms" { - description = "The default partition expiration for all partitioned tables in the dataset, in milliseconds. Once this property is set, all newly-created partitioned tables in the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value." - type = number - default = null -} - -variable "big_query_default_table_expiration_ms" { - description = "The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. 
If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property." - type = number - default = null -} - -variable "big_query_delete_contents_on_destroy" { - description = "If true, delete all the tables in the dataset when destroying the dataset; otherwise, destroying the dataset does not affect the tables in the dataset. If you try to delete a dataset that contains tables, and you set delete_contents_on_destroy to false when you created the dataset, the request will fail. Always use this flag with caution. A missing value is treated as false." - type = bool - default = false -} - -######################## -# Metadata # -######################## - -variable "metadata_name" { - type = string - description = "The name of the metadata store instance" - default = null -} - -######################## -# Bucket # -######################## - -variable "bucket_name" { - type = string - description = "name of bucket" - default = null -} - -variable "bucket_dual_region_locations" { - type = list(string) - default = [] - description = "dual region description" - validation { - condition = length(var.bucket_dual_region_locations) == 0 || length(var.bucket_dual_region_locations) == 2 - error_message = "Exactly 0 or 2 regions expected." - } -} - -variable "bucket_force_destroy" { - type = bool - description = "(Optional, Default: true) When deleting a bucket, this boolean option will delete all contained objects. If you try to delete a bucket that contains objects, Terraform will fail that run." - default = true -} - -variable "bucket_versioning_enabled" { - type = bool - description = "Whether to enable versioning or not" - default = true -} - -variable "bucket_lifecycle_rules" { - type = set(object({ - # Object with keys: - # - type - The type of the action of this Lifecycle Rule. 
Supported values: Delete and SetStorageClass. - # - storage_class - (Required if action type is SetStorageClass) The target Storage Class of objects affected by this Lifecycle Rule. - action = map(string) - - # Object with keys: - # - age - (Optional) Minimum age of an object in days to satisfy this condition. - # - created_before - (Optional) Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition. - # - with_state - (Optional) Match to live and/or archived objects. Supported values include: "LIVE", "ARCHIVED", "ANY". - # - matches_storage_class - (Optional) Comma delimited string for storage class of objects to satisfy this condition. Supported values include: MULTI_REGIONAL, REGIONAL. - # - num_newer_versions - (Optional) Relevant only for versioned objects. The number of newer versions of an object to satisfy this condition. - # - custom_time_before - (Optional) A date in the RFC 3339 format YYYY-MM-DD. This condition is satisfied when the customTime metadata for the object is set to an earlier date than the date used in this lifecycle condition. - # - days_since_custom_time - (Optional) The number of days from the Custom-Time metadata attribute after which this condition becomes true. - # - days_since_noncurrent_time - (Optional) Relevant only for versioned objects. Number of days elapsed since the noncurrent timestamp of an object. - # - noncurrent_time_before - (Optional) Relevant only for versioned objects. The date in RFC 3339 (e.g. 2017-06-13) when the object became nonconcurrent. - condition = map(string) - })) - description = "List of lifecycle rules to configure. Format is the same as described in provider documentation https://www.terraform.io/docs/providers/google/r/storage_bucket.html#lifecycle_rule except condition.matches_storage_class should be a comma delimited string." 
- default = [ - { - #Deletion Rules - #Control ID: GCS-CO-6.5 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - action = { - type = "SetStorageClass" - storage_class = "NEARLINE" - } - condition = { - age = "30" - matches_storage_class = "REGIONAL" - } - }, - { - #Deletion Rules - #Control ID: GCS-CO-6.6 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - action = { - type = "Delete" - } - condition = { - with_state = "ARCHIVED" - } - } - ] -} - -variable "bucket_retention_policy" { - type = any - default = {} - description = "Map of retention policy values. Format is the same as described in provider documentation https://www.terraform.io/docs/providers/google/r/storage_bucket#retention_policy" -} - -variable "bucket_object_folder_temporary_hold" { - type = bool - default = false - description = "Set root folder temporary hold according to security control GCS-CO-6.16, toggle off to allow for object deletion." -} - -#Labeling Tag -#Control ID: GCS-CO-6.4 -#NIST 800-53: SC-12 -#CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - -variable "bucket_labels" { - description = "Labels to be attached to the buckets" - type = map(string) - default = { - #Labelling tag - #Control ID: GCS-CO-6.4 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - label = "samplelabel" - - #Owner Tag - #Control ID: GCS-CO-6.8 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - owner = "testowner" - - #Classification Tag - #Control ID: GCS-CO-6.18 - #NIST 800-53: SC-12 - #CRI Profile: PR.IP-2.1 PR.IP-2.2 PR.IP-2.3 - - classification = "dataclassification" - } -} - -variable "bucket_add_random_suffix" { - description = "whether to add a random suffix to the bucket name" - type = bool - default = false -} - -variable "bucket_uniform_bucket_level_access" { - description = "Whether to have uniform access levels or not" - type = bool - default = true -} - -variable "bucket_storage_class" { - type = string - description = 
"Storage class to create the bucket" - default = "STANDARD" - validation { - condition = contains(["STANDARD", "MULTI_REGIONAL", "REGIONAL", "NEARLINE", "COLDLINE", "ARCHIVE"], var.bucket_storage_class) - error_message = "Storage class can be one of STANDARD, MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, ARCHIVE." - } -} - -variable "bucket_requester_pays" { - description = "Enables Requester Pays on a storage bucket." - type = bool - default = false -} - - -######################## -# TensorBoard # -######################## - -variable "tensorboard_name" { - type = string - description = "The name of the metadata store instance" - default = null -} diff --git a/5-app-infra/projects/service-catalog/README.md b/5-app-infra/projects/service-catalog/README.md index 4a70e7df..e69de29b 100644 --- a/5-app-infra/projects/service-catalog/README.md +++ b/5-app-infra/projects/service-catalog/README.md @@ -1,361 +0,0 @@ -# 5-app-infra - -This repo is part of a multi-part guide that shows how to configure and deploy -the example.com reference architecture described in -[Google Cloud security foundations guide](https://cloud.google.com/architecture/security-foundations). The following table lists the parts of the guide. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
0-bootstrapBootstraps a Google Cloud organization, creating all the required resources -and permissions to start using the Cloud Foundation Toolkit (CFT). This -step also configures a CI/CD Pipeline for foundations code in subsequent -stages.
1-orgSets up top-level shared folders, monitoring and networking projects, -organization-level logging, and baseline security settings through -organizational policies.
2-environmentsSets up development, non-production, and production environments within the -Google Cloud organization that you've created.
3-networks-dual-svpcSets up base and restricted shared VPCs with default DNS, NAT (optional), -Private Service networking, VPC service controls, on-premises Dedicated -Interconnect, and baseline firewall rules for each environment. It also sets -up the global DNS hub.
3-networks-hub-and-spokeSets up base and restricted shared VPCs with all the default configuration -found on step 3-networks-dual-svpc, but here the architecture will be based on the -Hub and Spoke network model. It also sets up the global DNS hub
4-projectsSets up a folder structure, projects, and an application infrastructure pipeline for applications, - which are connected as service projects to the shared VPC created in the previous stage.
5-app-infra - projects/service-catalog (this file)Deploys Composer and a pipeline
- -For an overview of the architecture and the parts, see the -[terraform-google-enterprise-genai README](https://github.com/terraform-google-modules/terraform-google-enterprise-genai) -file. - -## Purpose(s) -This project has two main purposes: - -1. To deploy a pipeline and a bucket which is linked to a Google Cloud Repository that houses terraform modules for the use in Service Catalog. -Although Service Catalog itself must be manually deployed, the modules which will be used can still be automated. - -2. To deploy infrastructure for operational environments (ie. `non-production` & `production`.) - -The resoning behind utilizing one repository with two deployment methodologies is due to how close interactive (`development`) and operational environments are. - -The repository has the structure (truncated for brevity): - ``` - business_unit_3 - ├── development - ├── non-production - ├── production - modules - ├── bucket - │   ├── README.md - │   ├── data.tf - │   ├── main.tf - │   ├── outputs.tf - │   ├── provider.tf - │   └── variables.tf - ├── composer - │   ├── README.md - │   ├── data.tf - │   ├── iam.roles.tf - │   ├── iam.users.tf - │   ├── locals.tf - │   ├── main.tf - │   ├── outputs.tf - │   ├── provider.tf - │   ├── terraform.tfvars.example - │   ├── variables.tf - │   └── vpc.tf - ├── cryptography - │   ├── README.md - │   ├── crypto_key - │   │   ├── main.tf - │   │   ├── outputs.tf - │   │   └── variables.tf - │   └── key_ring - │   ├── main.tf - │   ├── outputs.tf - │   └── variables.tf - ``` -Each folder under `modules` represents a terraform module. -When there is a change in any of the terraform module folders, the pipeline will find whichever module has been changed since the last push, `tar.gz` that file and place it in a bucket for Service Catalog to access. - -This pipeline is listening to the `main` branch of this repository for changes in order for the modules to be uploaded to service catalog. 
- -The pipeline also listens for changes made to `plan`, `development`, `non-production` & `production` branches, this is used for deploying infrastructure to each project. - - -The pipeline can be accessed by navigating to the project name created in step-4: - -```bash -terraform -chdir="../terraform-google-enterprise-genai/4-projects/business_unit_3/shared/" output -raw service_catalog_project_id -``` -## Prerequisites - -1. 0-bootstrap executed successfully. -1. 1-org executed successfully. -1. 2-environments executed successfully. -1. 3-networks executed successfully. -1. 4-projects executed successfully. - -### Troubleshooting - -Please refer to [troubleshooting](../docs/TROUBLESHOOTING.md) if you run into issues during this step. - -## Usage - -**Note:** If you are using MacOS, replace `cp -RT` with `cp -R` in the relevant -commands. The `-T` flag is needed for Linux, but causes problems for MacOS. - -### Deploying with Cloud Build - -1. Clone the `gcp-policies` repo based on the Terraform output from the `0-bootstrap` step. -Clone the repo at the same level of the `terraform-google-enterprise-genai` folder, the following instructions assume this layout. -Run `terraform output cloudbuild_project_id` in the `0-bootstrap` folder to get the Cloud Build Project ID. - - ```bash - export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="gcp-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${INFRA_PIPELINE_PROJECT_ID} - - gcloud source repos clone gcp-policies gcp-policies-app-infra --project=${INFRA_PIPELINE_PROJECT_ID} - ``` - - **Note:** `gcp-policies` repo has the same name as the repo created in step `1-org`. In order to prevent a collision, the previous command will clone this repo in the folder `gcp-policies-app-infra`. - -1. Navigate into the repo and copy contents of policy-library to new repo. All subsequent steps assume you are running them - from the gcp-policies-app-infra directory. 
If you run them from another directory, - adjust your copy paths accordingly. - - ```bash - cd gcp-policies-app-infra - git checkout -b main - - cp -RT ../terraform-google-enterprise-genai/policy-library/ . - ``` - -1. Commit changes and push your main branch to the new repo. - - ```bash - git add . - git commit -m 'Initialize policy library repo' - - git push --set-upstream origin main - ``` - -1. Navigate out of the repo. - - ```bash - cd .. - ``` - -1. Clone the `bu3-service-catalog` repo. - - ```bash - gcloud source repos clone bu3-service-catalog --project=${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. Navigate into the repo, change to non-main branch and copy contents of foundation to new repo. - All subsequent steps assume you are running them from the bu3-service-catalog directory. - If you run them from another directory, adjust your copy paths accordingly. - - ```bash - cd bu3-service-catalog - git checkout -b plan - - cp -RT ../terraform-google-enterprise-genai/5-app-infra/projects/service-catalog/ . - cp ../terraform-google-enterprise-genai/build/cloudbuild-tf-* . - cp ../terraform-google-enterprise-genai/build/tf-wrapper.sh . - chmod 755 ./tf-wrapper.sh - ``` - -1. Rename `common.auto.example.tfvars` to `common.auto.tfvars`. - - ```bash - mv common.auto.example.tfvars common.auto.tfvars - ``` - -1. Update the file with values from your environment and 0-bootstrap. See any of the business unit 1 envs folders [README.md](./business_unit_1/production/README.md) files for additional information on the values in the `common.auto.tfvars` file. - - ```bash - export remote_state_bucket=$(terraform -chdir="../terraform-google-enterprise-genai/0-bootstrap/" output -raw projects_gcs_bucket_tfstate) - echo "remote_state_bucket = ${remote_state_bucket}" - sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. 
- - ```bash - export backend_bucket=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-service-catalog"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` - -1. Commit changes. - - ```bash - git add . - git commit -m 'Initialize repo' - ``` - -1. Push your plan branch to trigger a plan for all environments. Because the - _plan_ branch is not a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), pushing your _plan_ - branch triggers _terraform plan_ but not _terraform apply_. Review the plan output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ```bash - git push --set-upstream origin plan - ``` - -1. Merge changes to shared. Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), - pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID - - ```bash - git checkout -b shared - git push origin shared - ``` - -## Post deployment -1. `cd` out of the `foundations` repository. - -1. Grab the Service Catalogs ID - ```shell - export SERVICE_CATALOG_PROJECT_ID=$(terraform -chdir="gcp-projects/business_unit_3/shared" output -raw service_catalog_project_id) - echo ${SERVICE_CATALOG_PROJECT_ID} - ``` - -1. Clone the freshly minted Cloud Source Repository that was created for this project. - ```shell - gcloud source repos clone service-catalog --project=${SERVICE_CATALOG_PROJECT_ID} - ``` -1. Enter the repo folder and copy over the service catalogs files from `5-app-infra/source_repos` folder. 
- ```shell - cd service-catalog - cp -RT ../terraform-google-enterprise-genai/5-app-infra/source_repos/service-catalog/ . - ``` - -1. Commit changes and push main branch to the new repo. - ```shell - git add . - git commit -m 'Initialize Service Catalog Build Repo' - - git push --set-upstream origin main - ``` - -1. Navigate to the project that was output from `${ARTIFACT_PROJECT_ID}` in Google's Cloud Console to view the first run of images being built. - -### Run Terraform locally - -1. The next instructions assume that you are at the same level of the `terraform-google-enterprise-genai` folder. Change into `5-app-infra` folder, copy the Terraform wrapper script and ensure it can be executed. - - ```bash - cd terraform-google-enterprise-genai/5-app-infra/projects/service-catalog - cp ../../../build/tf-wrapper.sh . - chmod 755 ./tf-wrapper.sh - ``` - -1. Rename `common.auto.example.tfvars` files to `common.auto.tfvars`. - - ```bash - mv common.auto.example.tfvars common.auto.tfvars - ``` - -1. Update `common.auto.tfvars` file with values from your environment. -1. Use `terraform output` to get the project backend bucket value from 0-bootstrap. - - ```bash - export remote_state_bucket=$(terraform -chdir="../0-bootstrap/" output -raw projects_gcs_bucket_tfstate) - echo "remote_state_bucket = ${remote_state_bucket}" - sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars - ``` - -1. Provide the user that will be running `./tf-wrapper.sh` the Service Account Token Creator role to the bu3 Terraform service account. -1. Provide the user permissions to run the terraform locally with the `serviceAccountTokenCreator` permission. 
- - ```bash - member="user:$(gcloud auth list --filter="status=ACTIVE" --format="value(account)")" - echo ${member} - - project_id=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${project_id} - - terraform_sa=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-service-catalog"' --raw-output) - echo ${terraform_sa} - - gcloud iam service-accounts add-iam-policy-binding ${terraform_sa} --project ${project_id} --member="${member}" --role="roles/iam.serviceAccountTokenCreator" - ``` - -1. Update `backend.tf` with your bucket from the infra pipeline output. - - ```bash - export backend_bucket=$(terraform -chdir="../4-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-service-catalog"' --raw-output) - echo "backend_bucket = ${backend_bucket}" - - for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done - ``` - -We will now deploy each of our environments (development/production/non-production) using this script. -When using Cloud Build or Jenkins as your CI/CD tool, each environment corresponds to a branch in the repository for the `5-app-infra` step. Only the corresponding environment is applied. - -To use the `validate` option of the `tf-wrapper.sh` script, please follow the [instructions](https://cloud.google.com/docs/terraform/policy-validation/validate-policies#install) to install the terraform-tools component. - -1. Use `terraform output` to get the Infra Pipeline Project ID from 4-projects output. 
- - ```bash - export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) - echo ${INFRA_PIPELINE_PROJECT_ID} - - export GOOGLE_IMPERSONATE_SERVICE_ACCOUNT=$(terraform -chdir="../../../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-service-catalog"' --raw-output) - echo ${GOOGLE_IMPERSONATE_SERVICE_ACCOUNT} - ``` - -1. Run `init` and `plan` and review output for environment shared (common). - - ```bash - ./tf-wrapper.sh init shared - ./tf-wrapper.sh plan shared - ``` - -1. Run `validate` and check for violations. - - ```bash - ./tf-wrapper.sh validate shared $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} - ``` - -1. Run `apply` shared. - - ```bash - ./tf-wrapper.sh apply shared - ``` - - -If you received any errors or made any changes to the Terraform config or `common.auto.tfvars` you must re-run `./tf-wrapper.sh plan ` before running `./tf-wrapper.sh apply `. - -After executing this stage, unset the `GOOGLE_IMPERSONATE_SERVICE_ACCOUNT` environment variable. 
- -```bash -unset GOOGLE_IMPERSONATE_SERVICE_ACCOUNT -``` - diff --git a/5-app-infra/projects/service-catalog/business_unit_3/shared/common.auto.tfvars b/5-app-infra/projects/service-catalog/business_unit_3/shared/common.auto.tfvars deleted file mode 120000 index 39aaa462..00000000 --- a/5-app-infra/projects/service-catalog/business_unit_3/shared/common.auto.tfvars +++ /dev/null @@ -1 +0,0 @@ -../../common.auto.tfvars \ No newline at end of file diff --git a/5-app-infra/projects/service-catalog/business_unit_3/shared/service_catalog.tf b/5-app-infra/projects/service-catalog/business_unit_3/shared/service_catalog.tf index 9686b40e..35376c79 100644 --- a/5-app-infra/projects/service-catalog/business_unit_3/shared/service_catalog.tf +++ b/5-app-infra/projects/service-catalog/business_unit_3/shared/service_catalog.tf @@ -15,7 +15,7 @@ */ module "service_catalog" { - source = "../../modules/svc_ctlg" + source = "../../../../modules/service_catalog" project_id = local.service_catalog_project_id region = var.instance_region diff --git a/5-app-infra/projects/service-catalog/modules/svc_ctlg/data.tf b/5-app-infra/projects/service-catalog/modules/svc_ctlg/data.tf deleted file mode 100644 index 12e6f689..00000000 --- a/5-app-infra/projects/service-catalog/modules/svc_ctlg/data.tf +++ /dev/null @@ -1,39 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -data "google_project" "project" { - project_id = var.project_id -} - -data "google_projects" "kms" { - filter = "labels.application_name:org-kms labels.environment:production lifecycleState:ACTIVE" - # filter = "labels.application_name:org-kms labels.environment:${data.google_project.project.labels.environment} lifecycleState:ACTIVE" -} - -data "google_kms_key_ring" "kms" { - name = "sample-keyring" - location = var.region - project = data.google_projects.kms.projects.0.project_id -} - -data "google_kms_crypto_key" "key" { - name = data.google_project.project.name - key_ring = data.google_kms_key_ring.kms.id -} - -data "google_projects" "log" { - filter = "labels.application_name:org-logging labels.environment:production lifecycleState:ACTIVE" -} diff --git a/5-app-infra/projects/service-catalog/modules/svc_ctlg/locals.tf b/5-app-infra/projects/service-catalog/modules/svc_ctlg/locals.tf deleted file mode 100644 index 1d4c7e5a..00000000 --- a/5-app-infra/projects/service-catalog/modules/svc_ctlg/locals.tf +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -locals { - # github_repository = replace(var.github_remote_uri, "https://", "") - log_bucket_prefix = "bkt" - bucket_permissions = { - - "roles/storage.admin" = [ - "serviceAccount:${data.google_project.project.number}@cloudbuild.gserviceaccount.com" - ], - "roles/storage.legacyObjectReader" = [ - "serviceAccount:${var.machine_learning_project_number}@cloudbuild.gserviceaccount.com", - ], - } - - bucket_roles = flatten([ - for role in keys(local.bucket_permissions) : [ - for sa in local.bucket_permissions[role] : - { - role = role - acct = sa - } - ] - ]) -} - - diff --git a/5-app-infra/projects/service-catalog/modules/svc_ctlg/main.tf b/5-app-infra/projects/service-catalog/modules/svc_ctlg/main.tf deleted file mode 100644 index ed53600e..00000000 --- a/5-app-infra/projects/service-catalog/modules/svc_ctlg/main.tf +++ /dev/null @@ -1,143 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -# resource "google_project_service_identity" "storage_agent" { -# provider = google-beta - -# project = var.project_id -# service = "storage.googleapis.com" -# } -# resource "google_kms_crypto_key_iam_member" "storage-kms-key-binding" { -# crypto_key_id = data.google_kms_crypto_key.key.id -# role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" -# member = "serviceAccount:${google_project_service_identity.storage_agent.email}" -# } - -resource "random_string" "bucket_name" { - length = 4 - upper = false - numeric = true - lower = true - special = false -} - -resource "google_storage_bucket" "bucket" { - location = var.region - name = "${var.gcs_bucket_prefix}-${var.project_id}-${lower(var.region)}-${random_string.bucket_name.result}" - project = var.project_id - uniform_bucket_level_access = true - - encryption { - default_kms_key_name = data.google_kms_crypto_key.key.id - } - versioning { - enabled = true - } - logging { - log_bucket = join("-", [local.log_bucket_prefix, data.google_projects.log.projects.0.project_id]) - } - -} - -resource "google_storage_bucket_iam_member" "bucket_role" { - for_each = { for gcs in local.bucket_roles : "${gcs.role}-${gcs.acct}" => gcs } - bucket = google_storage_bucket.bucket.name - role = each.value.role - member = each.value.acct -} - -# resource "google_sourcerepo_repository" "service_catalog" { -# project = var.project_id -# name = var.name -# } - -resource "google_sourcerepo_repository_iam_member" "read" { - project = var.project_id - repository = var.name - role = "roles/viewer" - member = "serviceAccount:${var.tf_service_catalog_sa_email}" -} - -resource "google_cloudbuild_trigger" "zip_files" { - name = "zip-tf-files-trigger" - project = var.project_id - location = var.region - - # repository_event_config { - # repository = var.cloudbuild_repo_id - # push { - # branch = "^main$" - # } - # } - - trigger_template { - branch_name = "^main$" - repo_name = var.name - } - - build { - # step { - # id = "unshallow" - # name 
= "gcr.io/cloud-builders/git" - # secret_env = ["token"] - # entrypoint = "/bin/bash" - # args = [ - # "-c", - # "git fetch --unshallow https://$token@${local.github_repository}" - # ] - - # } - step { - id = "unshallow" - name = "gcr.io/cloud-builders/git" - entrypoint = "/bin/bash" - args = [ - "-c", - "git fetch --unshallow" - ] - - } - # available_secrets { - # secret_manager { - # env = "token" - # version_name = var.secret_version_name - # } - # } - step { - id = "find-folders-affected-in-push" - name = "gcr.io/cloud-builders/git" - entrypoint = "/bin/bash" - args = [ - "-c", - <<-EOT - changed_files=$(git diff $${COMMIT_SHA}^1 --name-only -r) - changed_folders=$(echo "$changed_files" | awk -F/ '{print $2}' | sort | uniq ) - - for folder in $changed_folders; do - echo "Found change in folder: $folder" - (cd modules/$folder && find . -type f -name '*.tf' -exec tar -cvzPf "/workspace/$folder.tar.gz" {} +) - done - EOT - ] - } - step { - id = "push-to-bucket" - name = "gcr.io/cloud-builders/gsutil" - args = ["cp", "/workspace/*.tar.gz", "gs://${google_storage_bucket.bucket.name}/modules/"] - } - } -} - diff --git a/5-app-infra/projects/service-catalog/modules/svc_ctlg/outputs.tf b/5-app-infra/projects/service-catalog/modules/svc_ctlg/outputs.tf deleted file mode 100644 index 9efe0568..00000000 --- a/5-app-infra/projects/service-catalog/modules/svc_ctlg/outputs.tf +++ /dev/null @@ -1,25 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -output "storage_bucket_name" { - description = "Name of storage bucket created" - value = google_storage_bucket.bucket.name -} - -output "cloudbuild_trigger_id" { - description = "Id of Cloud Build Trigger" - value = google_cloudbuild_trigger.zip_files.id -} diff --git a/5-app-infra/projects/service-catalog/modules/svc_ctlg/variables.tf b/5-app-infra/projects/service-catalog/modules/svc_ctlg/variables.tf deleted file mode 100644 index df0cf433..00000000 --- a/5-app-infra/projects/service-catalog/modules/svc_ctlg/variables.tf +++ /dev/null @@ -1,44 +0,0 @@ -/** - * Copyright 2021 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -variable "name" { - description = "Name of the repository." - type = string -} -variable "region" { - description = "Location of the repository." 
- type = string -} - -variable "project_id" { - description = "Project ID" - type = string -} - -variable "gcs_bucket_prefix" { - description = "Prefix of the bucket name" - default = "bkt" -} - -variable "tf_service_catalog_sa_email" { - description = "Full email of the terraform service account for service-catalog" - type = string -} - -variable "machine_learning_project_number" { - description = "Project Number for the Machine Learning (Vertex) Project" - type = string -} diff --git a/5-app-infra/2-artifact-publish-repo/images/tf2-cpu.2-8:01/Dockerfile b/5-app-infra/source_repos/artifact-publish/images/tf2-cpu.2-8:01/Dockerfile similarity index 100% rename from 5-app-infra/2-artifact-publish-repo/images/tf2-cpu.2-8:01/Dockerfile rename to 5-app-infra/source_repos/artifact-publish/images/tf2-cpu.2-8:01/Dockerfile diff --git a/5-app-infra/2-artifact-publish-repo/images/vertexpipeline:v2/Dockerfile b/5-app-infra/source_repos/artifact-publish/images/vertexpipeline:v2/Dockerfile similarity index 100% rename from 5-app-infra/2-artifact-publish-repo/images/vertexpipeline:v2/Dockerfile rename to 5-app-infra/source_repos/artifact-publish/images/vertexpipeline:v2/Dockerfile diff --git a/6-ml-pipeline/dev/Readme.md b/6-ml-pipeline/dev/Readme.md deleted file mode 100644 index 4b1f24e1..00000000 --- a/6-ml-pipeline/dev/Readme.md +++ /dev/null @@ -1,164 +0,0 @@ -# Overview -This environment is set up for interactive coding and experimentations. After the project is up, the vertex workbench is deployed from service catalog and The datascientis can use it to write their code including any experiments, data processing code and pipeline components. In addition, a cloud storage bucket is deployed to use as the storage for our operations. Optionally a composer environment is which will later be used to schedule the pipeline run on intervals. 
- -For our pipeline which trains and deploys a model on the [census income dataset](https://archive.ics.uci.edu/dataset/20/census+income), we use a notebook in the dev workbench to create our pipeline components, put them together into a pipeline and do a dry run of the pipeline to make sure there are no issues. You can access the repository [here](https://github.com/GoogleCloudPlatform/terraform-google-enterprise-genai/tree/main/7-vertexpipeline). [^1] - -[^1]: There is a Dockerfile in the repo which is the docker image used to run all pipeline steps and cloud build steps. In non-prod and prod environments, the only NIST compliant way to access additional dependencies and requirements is via docker images uploaded to artifact registry. We have baked everything for running the pipeline into this docker which exsits in the shared artifact registry. - -Once confident, we divide the code in two separate files to use in our CI/CD process in the non-prod environment. First is *compile_pipeline.py* which includes the code to build the pipeline and compile it into a directory (in our case, /common/vertex-ai-pipeline/pipeline_package.yaml) - -The second file, i.e. *runpipeline.py* includes the code for running the compiled pipeline. This is where the correct environemnt variables for non-prod nad prod (e.g., service accounts to use for each stage of the pipeline, kms keys corresponding to each step, buckets, etc.) are set. And eventually the pipeline is loaded from the yaml file at *common/vertex-ai-pipeline/pipeline_package.yaml* and submitted to vertex ai. - - -There is a *cloudbuild.yaml* file in the repo with the CI/CD steps as follows: - -1. Upload the Dataflow src file to the bucket in non-prod -2. Upload the dataset to the bucket -3. Run *compile_pipeline.py* to compile the pipeline -4. Run the pipeline via *runpipeline.py* -5. 
Optionally, upload the pipeline's yaml file to the composer bucket to make it available for scheduled pipeline runs - -The cloud build trigger will be setup in the non-prod project which is where the ML pipeline will run. There are currently three branches on the repo namely dev, staging (non-prod), and prod. Cloud build will trigger the pipeline once there is a merge into the staging (non-prod) branch from dev. However, model deployment and monitorings steps take place in the prod environment. As a result, the service agents and service accounts of the non-prod environment are given some permission on the prod environment and vice versa. - -Each time a pipeline job finishes successfully, a new version of the census income bracket predictor model will be deployed on the endpoint which will only take 25 percent of the traffic wherease the other 75 percent goes to the previous version of the model to enable A/B testing. - -You can read more about the details of the pipeline components on the [pipeline's repo](https://github.com/GoogleCloudPlatform/terraform-google-enterprise-genai/tree/main/7-vertexpipeline#readme) - -# Step by step -Before you start, make sure you have your personal git access token ready. The git menu option on the left bar of the workbench requires the personal token to connect to git and clone the repo. -Also make sure to have a gcs bucket ready to store the artifacts for the tutorial. To deploy the bucket, you can go to service catalog and create a new deployment from the storage bucket solution. -### 1. Run the notebook -- Take 7-vertexpipeline folder and make you own copy as a standalone git repository and clone it in the workbench in your dev project. Create a dev branch of the new repository. Switch to the dev branch by choosing it in the branch section of the git view. Now go back to the file browser view by clicking the first option on the left bar menu. 
Navigate to the directory you just clone and run [the notebook](https://github.com/GoogleCloudPlatform/terraform-google-enterprise-genai/blob/main/7-vertexpipeline/census_pipeline.ipynb) cell by cell. Pay attention to the instructions and comments in the notebook and don't forget to set the correct values corresponding to your dev project. - -### 2. Configure cloud build -- After the notebook runs successfully and the pipeline's test run finishes in the dev environment, create a cloud build trigger in your non-prod project. Configure the trigger to run when there is a merge into the staging (non-prod) branch by following the below settings. - - |Setting|Value| - |-------|-----| - |Event|push to branch| - |Repository generation|1st gen| - |Repository|the url to your fork of the repo| - |Branch|staging| - |Configuration|Autodetected/Cloud Build configuration file (yaml or json)| - |Location|Repository| - |Cloud Build configuration file location|cloudbuild.yaml| - - -- Open the cloudbuild.yaml file in your workbench and for steps 1 which uploads the source code for the dataflow job to your bucket. - - ``` - name: 'gcr.io/cloud-builders/gsutil' - args: ['cp', '-r', './src', 'gs://{your-bucket-name}'] - ``` - -- Similarly in step 2, replace the bucket name with the name of your own bucket in the non-prod project in order to upload the data to your bucket: - ``` - name: 'gcr.io/cloud-builders/gsutil' - args: ['cp', '-r', './data', 'gs://{your-bucket-name}'] - ``` - -- Change the name of the image for step 3 and 4 to that of your own artifact project, i.e., `us-central1-docker.pkg.dev/{artifact_project_id}/c-publish-artifacts/vertexpipeline:v2` This is the project with artifact registry that houses the image required to run the pipeline. 
- -``` - - name: 'us-central1-docker.pkg.dev/{your-artifact-project}/c-publish-artifacts/vertexpipeline:v2' - entrypoint: 'python' - args: ['compile_pipeline.py'] - id: 'compile_job' - - # run pipeline - - name: 'us-central1-docker.pkg.dev/{your-artifact-project}/c-publish-artifacts/vertexpipeline:v2' - entrypoint: 'python' - args: ['runpipeline.py'] - id: 'run_job' - waitFor: ['compile_job'] -``` - -- Optionally, if you want to schedule pipeline runs on regular intervals, uncomment the last two steps and replace the composer bucket with the name of your composer's bucket. The first step uploads the pipeline's yaml to the bucket and the second step uploads the dag to read that yaml and trigger the vertex pipeline: -``` - # upload to composer - - name: 'gcr.io/cloud-builders/gsutil' - args: ['cp', './common/vertex-ai-pipeline/pipeline_package.yaml', 'gs://{your-composer-bucket}/dags/common/vertex-ai-pipeline/'] - id: 'upload_composer_file' - - # upload pipeline dag to composer - - name: 'gcr.io/cloud-builders/gsutil' - args: ['cp', './composer/dags/dag.py', 'gs://{your-composer-bucket}/dags/'] - id: 'upload dag' -``` - -### 3. 
Configure variables in compile_pipeline.py and runpipeline.py -- Make sure to set the correct values for variables like **PROJECT_ID**, **BUCKET_URI**, encryption keys and service accounts, etc.: - - |variable|definition|example value|How to obtain| - |--------|----------|-------------|-------------| - |PROJECT_ID|The id of the non-prod project|`{none-prod-project-id}`|From the project's menu in console navigate to the `fldr-non-production/fldr-non-production-bu3` folder; here you can find the machine learning project in non-prod (`prj-n-bu3machine-learning`) and obtain its' ID| - |BUCKET_URI|URI of the non-prod bucket|`gs://non-prod-bucket`|From the project menu in console navigate to the non-prod ML project `fldr-non-production/fldr-non-production-bu3/prj-n-bu3machine-learning` project, navigate to cloud storage and copy the name of the bucket available there| - |REGION|The region for pipeline jobs|Can be left as default `us-central1`| - |PROD_PROJECT_ID|ID of the prod project|`prod-project-id`|In console's project menu, navigate to the `fldr-production/fldr-production-bu3` folder; here you can find the machine learning project in prod (`prj-p-bu3machine-learning`) and obtain its' ID| - |Image|The image artifact used to run the pipeline components. The image is already built and pushed to the artifact repository in your artifact project under the common folder|`f"us-central1-docker.pkg.dev/{{artifact-project}}/{{artifact-repository}}/vertexpipeline:v2"`|Navigate to `fldr-common/prj-c-bu3artifacts` project. Navigate to the artifact registry repositories in the project to find the full name of the image artifact.| - |DATAFLOW_SUBNET|The shared subnet in non-prod env required to run the dataflow job|`https://www.googleapis.com/compute/v1/projects/{non-prod-network-project}/regions/us-central1/subnetworks/{subnetwork-name}`|Navigate to the `fldr-network/prj-n-shared-restricted` project. 
Navigate to the VPC networks and under the subnets tab, find the name of the network associated with your region (us-central1)| - |SERVICE_ACCOUNT|The service account used to run the pipeline and it's components such as the model monitoring job. This is the compute default service account of non-prod if you don't plan on using another costume service account|`{non-prod-project_number}-compute@developer.gserviceaccount.com`|Head over to the IAM page in the non-prod project `fldr-non-production/fldr-non-production-bu3/prj-n-bu3machine-learning`, check the box for `Include Google-provided role grants` and look for the service account with the `{project_number}-compute@developer.gserviceaccount.com`| - |PROD_SERICE_ACCOUNT|The service account used to create endpoint, upload the model, and deploy the model in the prod project. This is the compute default service account of prod if you don't plan on using another costume service account|`{prod-project_number}-compute@developer.gserviceaccount.com`|Head over to the IAM page in the prod project `fldr-production/fldr-production-bu3/prj-p-bu3machine-learning`, check the box for `Include Google-provided role grants` and look for the service account with the `{project_number}-compute@developer.gserviceaccount.com`| - |deployment_config['encryption']|The kms key for the prod env. This key is used to encrypt the vertex model, endpoint, model deployment, and model monitoring.|`projects/{prod-kms-project}/locations/us-central1/keyRings/{keyring-name}/cryptoKeys/{key-name}`|Navigate to `fldr-production/prj-n-kms`, navigate to the Security/Key management in that project to find the key in `sample-keyring` keyring of your target region `us-central1`| - |encryption_spec_key_name|The name of the encryption key for the non-prod env. 
This key is used to create the vertex pipeline job and it's associated metadata store|`projects/{non-prod-kms-project}/locations/us-central1/keyRings/{keyring-name}/cryptoKeys/{key-name}`|Navigate to `fldr-non-production/prj-n-kms`, navigate to the Security/Key management in that project to find the key in `sample-keyring` keyring of your target region `us-central1`| - |monitoring_config['email']|The email that Vertex AI monitoring will email alerts to|`your email`|your email associated with your gcp account| - -The compile_pipeline.py and runpipeline.py files are commented to point out these variables. -### 4. Merge and deploy -- Once everything is configured, you can commit your changes and push to the dev branch. Then, create a PR to from dev to staging(non-prod) which will result in triggering the pipeline if approved. The vertex pipeline takes about 30 minutes to finish and if there are no errors, a trained model will be deployed to and endpoint in the prod project which you can use to make prediction requests. - -### 5. Model Validation - -Once you have the model running at an endpoint in the production project, you will be able to test it. -Here are step-by-step instructions to make a request to your model using `gcloud` and `curl`: - -1. Initialize variables on your terminal session - - ```bash - ENDPOINT_ID= - PROJECT_ID= - INPUT_DATA_FILE="body.json" - ``` - - > You can retrieve your ENDPOINT_ID by running `gcloud ai endpoints list --region=us-central1 --project=` or by navigating to it on the Google Cloud Console (https://console.cloud.google.com/vertex-ai/online-prediction/endpoints?project=`) - -2. 
Create a file named `body.json` and put some sample data into it: - - ```json - { - "instances": [ - { - "features/gender": "Female", - "features/workclass": "Private", - "features/occupation": "Tech-support", - "features/marital_status": "Married-civ-spouse", - "features/race": "White", - "features/capital_gain": 0, - "features/education": "9th", - "features/age": 33, - "features/hours_per_week": 40, - "features/relationship": "Wife", - "features/native_country": "Canada", - "features/capital_loss": 0 - } - ] - } - ``` - -3. Run a curl request using `body.json` file as the JSON Body. - - ```bash - curl -X POST -H "Authorization: Bearer $(gcloud auth print-access-token)" \ - -H "Content-Type: application/json" \ - https://us-central1-aiplatform.googleapis.com/v1/projects/${PROJECT_ID}/locations/us-central1/endpoints/${ENDPOINT_ID}:predict -d "@${INPUT_DATA_FILE}" - ``` - - - You should get an output from 0 to 1, indicating the level of confidence of the binary classification based on the parameters above. - Values closer to 1 means the individual is more likely to be included in the income_bracket greater than 50K. - -# Common errors -- ***google.api_core.exceptions.ResourceExhausted: 429 The following quotas are exceeded: ```CustomModelServingCPUsPerProjectPerRegion 8: The following quotas are exceeded: CustomModelServingCPUsPerProjectPerRegion``` or similar error***: -This is likely due to the fact that you have too many models uploaded and deployed in Vertex AI. To resolve the issue, you can either submit a quota increase request or undeploy and delete a few models to free up resources - -- ***Google Compute Engine Metadata service not available/found***: -You might encounter this when the vertex pipeline job attempts to run even though it is an obsolete issue according to [this thread](https://issuetracker.google.com/issues/229537245#comment9). 
It'll most likely resolve by re-running the vertex pipeline diff --git a/Makefile b/Makefile index 6ca20792..215cbcae 100644 --- a/Makefile +++ b/Makefile @@ -28,7 +28,7 @@ docker_test_lint: docker run --rm -it \ -e ENABLE_PARALLEL=0 \ -e DISABLE_TFLINT=1 \ - -e EXCLUDE_LINT_DIRS="\./5-app-infra/6-machine-learning|\./5-app-infra/projects/machine-learning|\./docs/assets/terraform" \ + -e EXCLUDE_LINT_DIRS="\./examples/machine-learning-pipeline|\./docs/assets/terraform" \ -v $(CURDIR):/workspace \ $(REGISTRY_URL)/${DOCKER_IMAGE_DEVELOPER_TOOLS}:${DOCKER_TAG_VERSION_DEVELOPER_TOOLS} \ /usr/local/bin/test_lint.sh diff --git a/README.md b/README.md index de72bcb6..8f063b27 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ Alternatively, the user can follow steps on [`docs/deploy_on_foundation_v4.0.0.m Additional steps were added to provide an example Machine Learning application: -* [6. ml-pipeline](./6-ml-pipeline/) +* [machine-learning-pipeline](./examples/machine-learning-pipeline) * This additional step, adds an interactive coding and experimentation, deploying the Vertex Workbench for data scientists. * The step will guide you through creating a ML pipeline using a notebook on Google Vertex AI Workbench Instance. * After promoting the ML pipeline, it is triggered by Cloud Build upon staging branch merges, trains and deploys a model using the census income dataset. diff --git a/docs/assets/terraform/1-org/README.md b/docs/assets/terraform/1-org/README.md index 092ada1b..c186dd9a 100644 --- a/docs/assets/terraform/1-org/README.md +++ b/docs/assets/terraform/1-org/README.md @@ -1 +1,12 @@ # Terraform code in this directory is used on docs as reference/assets. + + +## Inputs + +No inputs. + +## Outputs + +No outputs. 
+ + \ No newline at end of file diff --git a/docs/assets/terraform/2-environments/README.md b/docs/assets/terraform/2-environments/README.md index 092ada1b..c186dd9a 100644 --- a/docs/assets/terraform/2-environments/README.md +++ b/docs/assets/terraform/2-environments/README.md @@ -1 +1,12 @@ # Terraform code in this directory is used on docs as reference/assets. + + +## Inputs + +No inputs. + +## Outputs + +No outputs. + + \ No newline at end of file diff --git a/5-app-infra/6-machine-learning/.gitignore b/examples/machine-learning-pipeline/.gitignore similarity index 100% rename from 5-app-infra/6-machine-learning/.gitignore rename to examples/machine-learning-pipeline/.gitignore diff --git a/examples/machine-learning-pipeline/README.md b/examples/machine-learning-pipeline/README.md new file mode 100644 index 00000000..7ce16ee4 --- /dev/null +++ b/examples/machine-learning-pipeline/README.md @@ -0,0 +1,1002 @@ +# Machine Learning Pipeline Overview + +This repo is part of a multi-part guide that shows how to configure and deploy +the example.com reference architecture described in +[Google Cloud security foundations guide](https://cloud.google.com/architecture/security-foundations). The following table lists the parts of the guide. + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
0-bootstrapBootstraps a Google Cloud organization, creating all the required resources +and permissions to start using the Cloud Foundation Toolkit (CFT). This +step also configures a CI/CD Pipeline for foundations code in subsequent +stages.
1-orgSets up top-level shared folders, monitoring and networking projects, +organization-level logging, and baseline security settings through +organizational policies.
2-environmentsSets up development, non-production, and production environments within the +Google Cloud organization that you've created.
3-networks-dual-svpcSets up base and restricted shared VPCs with default DNS, NAT (optional), +Private Service networking, VPC service controls, on-premises Dedicated +Interconnect, and baseline firewall rules for each environment. It also sets +up the global DNS hub.
4-projectsSets up a folder structure, projects, and an application infrastructure pipeline for applications, + which are connected as service projects to the shared VPC created in the previous stage.
Machine-learning-pipeline(this file)Deploys modules based on the modules created in 5-app-infra
+
+For an overview of the architecture and the parts, see the
+[terraform-google-enterprise-genai README](https://github.com/terraform-google-modules/terraform-google-enterprise-genai)
+file.
+
+## Purpose
+
+The purpose of this guide is to provide a structured approach to deploying a machine learning pipeline on Google Cloud Platform using Vertex AI.
+
+## Prerequisites
+
+1. 0-bootstrap executed successfully.
+2. 1-org executed successfully.
+3. 2-environments executed successfully.
+4. 3-networks executed successfully.
+5. 4-projects executed successfully.
+6. 5-app-infra executed successfully.
+7. The step below `VPC-SC` executed successfully.
+
+### VPC-SC
+
+By now, `artifact-publish` and `service-catalog` have been deployed. The projects inflated under `machine-learning-pipeline` are set in a service perimeter for added security. As such, several services and accounts must be given ingress and egress policies before `machine-learning-pipeline` has been deployed.
+
+cd into gcp-networks
+
+   ```bash
+   cd gcp-networks/
+   ```
+
+Below, you can find the values that will need to be applied to `common.auto.tfvars` and your `development.auto.tfvars`, `non-production.auto.tfvars` & `production.auto.tfvars`.
+ +In `common.auto.tfvars` update your `perimeter_additional_members` to include: + + ``` + "serviceAccount:sa-tf-cb-bu3-machine-learning@[prj_c_bu3infra_pipeline_project_id].iam.gserviceaccount.com" + "serviceAccount:sa-terraform-env@[prj_b_seed_project_id].iam.gserviceaccount.com" + "serviceAccount:service-[prj_d_logging_project_number]@gs-project-accounts.iam.gserviceaccount.com" + "serviceAccount:[prj_d_machine_learning_project_number]@cloudbuild.gserviceaccount.com" + ``` + + ```bash + export prj_c_bu3infra_pipeline_project_id=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) + echo "prj_c_bu3infra_pipeline_project_id = ${prj_c_bu3infra_pipeline_project_id}" + + export prj_b_seed_project_id=$(terraform -chdir="../terraform-google-enterprise-genai/0-bootstrap/" output -raw seed_project_id) + echo "prj_b_seed_project_id = ${prj_b_seed_project_id}" + + export prj_b_seed_project_id=$(terraform -chdir="../terraform-google-enterprise-genai/0-bootstrap/" output -raw seed_project_id) + echo "prj_b_seed_project_id = ${prj_b_seed_project_id}" + + export prj_b_seed_project_id=$(terraform -chdir="../terraform-google-enterprise-genai/0-bootstrap/" output -raw seed_project_id) + echo "prj_b_seed_project_id = ${prj_b_seed_project_id}" + + export backend_bucket=$(terraform -chdir="../terraform-google-enterprise-genai/0-bootstrap/" output -raw gcs_bucket_tfstate) + echo "remote_state_bucket = ${backend_bucket}" + + export backend_bucket_projects=$(terraform -chdir="../terraform-google-enterprise-genai/0-bootstrap/" output -raw projects_gcs_bucket_tfstate) + echo "backend_bucket_projects = ${backend_bucket_projects}" + + export project_d_logging_project_number=$(gsutil cat gs://$backend_bucket/terraform/environments/development/default.tfstate | jq -r '.outputs.env_log_project_number.value') + echo "project_d_logging_project_number = ${project_d_logging_project_number}" + + prj_d_machine_learning_project_number=$(gsutil cat 
gs://$backend_bucket_projects/terraform/projects/business_unit_3/development/default.tfstate | jq -r '.outputs.machine_learning_project_number.value') + echo "project_d_machine_learning_number = ${prj_d_machine_learning_project_number}" + ``` + + +In each respective environment folders, update your `development.auto.tfvars`, `non-production.auto.tfvars` & `production.auto.tfvars` to include these changes under `ingress_policies` + +You can find the `sources.access_level` information by going to `Security` in your GCP Organization. +Once there, select the perimeter that is associated with the environment (eg. `development`). Copy the string under Perimeter Name and place it under `YOUR_ACCESS_LEVEL` + +#### Ingress Policies + + ``` + ingress_policies = [ + + // users + { + "from" = { + "identity_type" = "ANY_IDENTITY" + "sources" = { + "access_level" = "[YOUR_ACCESS_LEVEL]" + } + }, + "to" = { + "resources" = [ + "projects/[your-environment-shared-restricted-project-number]", + "projects/[your-environment-kms-project-number]", + "projects/[your-environment-bu3machine-learning-number]", + ] + "operations" = { + "compute.googleapis.com" = { + "methods" = ["*"] + } + "dns.googleapis.com" = { + "methods" = ["*"] + } + "logging.googleapis.com" = { + "methods" = ["*"] + } + "storage.googleapis.com" = { + "methods" = ["*"] + } + "cloudkms.googleapis.com" = { + "methods" = ["*"] + } + "iam.googleapis.com" = { + "methods" = ["*"] + } + "cloudresourcemanager.googleapis.com" = { + "methods" = ["*"] + } + "pubsub.googleapis.com" = { + "methods" = ["*"] + } + "secretmanager.googleapis.com" = { + "methods" = ["*"] + } + "aiplatform.googleapis.com" = { + "methods" = ["*"] + } + "composer.googleapis.com" = { + "methods" = ["*"] + } + "cloudbuild.googleapis.com" = { + "methods" = ["*"] + } + "bigquery.googleapis.com" = { + "methods" = ["*"] + } + } + } + }, + ] + ``` + +#### Egress Policies + +For your DEVELOPMENT.AUTO.TFVARS file, also include this as an egress policy: + + ```bash 
+ egress_policies = [ + // notebooks + { + "from" = { + "identity_type" = "" + "identities" = [ + "serviceAccount:service-[prj-d-bu3machine-learning-project-number]@gcp-sa-notebooks.iam.gserviceaccount.com", + "serviceAccount:service-[prj-d-bu3machine-learning-project-number]@compute-system.iam.gserviceaccount.com", + ] + }, + "to" = { + "resources" = ["projects/[prj-d-kms-project-number]"] + "operations" = { + "compute.googleapis.com" = { + "methods" = ["*"] + } + "cloudkms.googleapis.com" = { + "methods" = ["*"] + } + } + } + }, + ] + ``` + +### Troubleshooting + +Please refer to [troubleshooting](../docs/TROUBLESHOOTING.md) if you run into issues during this step. + +## Usage + +**Note:** If you are using MacOS, replace `cp -RT` with `cp -R` in the relevant +commands. The `-T` flag is needed for Linux, but causes problems for MacOS. + +You will need a github repository set up for this step. This repository houses the DAG's for composer. As of this writing, the structure is as follows: + + ``` + . + ├── README.md + └── dags + ├── hello_world.py + └── strings.py + ``` + +Add in your dags in the `dags` folder. Any changes to this folder will trigger a pipeline and place the dags in the appropriate composer environment depending on which branch it is pushed to (`development`, `non-production`, `production`) + +Have a github token for access to your repository ready, along with an [Application Installation Id](https://cloud.google.com/build/docs/automating-builds/github/connect-repo-github#connecting_a_github_host_programmatically) and the remote uri to your repository. + +These environmental project inflations are closely tied to the `service-catalog` project that have already deployed. By now, the `bu3-service-catalog` should have been inflated. `service-catalog` contains modules that are being deployed in an interactive (development) environment. 
Since they already exist; they can be used as terraform modules for operational (non-production, production) environments. This was done in order to avoid code redundancy. One area for all `machine-learning` deployments. + +Under `modules/base_env/main.tf` you will notice all module calls are using `git` links as sources. These links refer to the `service-catalog` cloud source repository we have already set up. + +Step 12 in "Deploying with Cloud Build" highlights the necessary steps needed to point the module resources to the correct location. + +### Deploying with Cloud Build + +1. Clone the `gcp-policies` repo based on the Terraform output from the `0-bootstrap` step. +Clone the repo at the same level of the `terraform-google-enterprise-genai` folder, the following instructions assume this layout. +Run `terraform output cloudbuild_project_id` in the `0-bootstrap` folder to get the Cloud Build Project ID. + + ```bash + export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="gcp-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) + echo ${INFRA_PIPELINE_PROJECT_ID} + + gcloud source repos clone gcp-policies gcp-policies-app-infra --project=${INFRA_PIPELINE_PROJECT_ID} + ``` + + **Note:** `gcp-policies` repo has the same name as the repo created in step `1-org`. In order to prevent a collision, the previous command will clone this repo in the folder `gcp-policies-app-infra`. + +1. Navigate into the repo and copy contents of policy-library to new repo. All subsequent steps assume you are running them + from the gcp-policies-app-infra directory. If you run them from another directory, + adjust your copy paths accordingly. + + ```bash + cd gcp-policies-app-infra + git checkout -b main + + cp -RT ../terraform-google-enterprise-genai/policy-library/ . + ``` + +1. Commit changes and push your main branch to the new repo. + + ```bash + git add . + git commit -m 'Initialize policy library repo' + + git push --set-upstream origin main + ``` + +1. 
Navigate out of the repo. + + ```bash + cd .. + ``` + +1. Clone the `bu3-machine-learning` repo. + + ```bash + gcloud source repos clone bu3-machine-learning --project=${INFRA_PIPELINE_PROJECT_ID} + ``` + +1. Navigate into the repo, change to non-main branch and copy contents of foundation to new repo. + All subsequent steps assume you are running them from the bu3-machine-learning directory. + If you run them from another directory, adjust your copy paths accordingly. + + ```bash + cd bu3-machine-learning + git checkout -b plan + + cp -RT ../terraform-google-enterprise-genai/examples/machine-learning-pipeline . + cp ../terraform-google-enterprise-genai/build/cloudbuild-tf-* . + cp ../terraform-google-enterprise-genai/build/tf-wrapper.sh . + chmod 755 ./tf-wrapper.sh + ``` + +1. Rename `common.auto.example.tfvars` to `common.auto.tfvars`. + + ```bash + mv common.auto.example.tfvars common.auto.tfvars + ``` + +1. Update the `common.auto.tfvars` file with your github app installation id, along with the url of your repository. + + ```bash + GITHUB_APP_ID="YOUR-GITHUB-APP-ID-HERE" + GITHUB_REMOTE_URI="YOUR-GITHUB-REMOTE-URI" + + sed -i "s/GITHUB_APP_ID/${GITHUB_APP_ID}/" ./common.auto.tfvars + sed -i "s/GITHUB_REMOTE_URI/${GITHUB_REMOTE_URI}/" ./common.auto.tfvars + ``` + +1. Use `terraform output` to get the project backend bucket value from 0-bootstrap. + + ```bash + export remote_state_bucket=$(terraform -chdir="../terraform-google-enterprise-genai/0-bootstrap/" output -raw projects_gcs_bucket_tfstate) + echo "remote_state_bucket = ${remote_state_bucket}" + sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars + ``` + +1. Use `terraform output` to retrieve the Service Catalog project-id from the projects step and update values in `module/base_env`. 
+ + ```bash + export service_catalog_project_id=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -raw service_catalog_project_id) + echo "service_catalog_project_id = ${service_catalog_project_id}" + + ## Linux + sed -i "s/SERVICE_CATALOG_PROJECT_ID/${service_catalog_project_id}/g" ./modules/base_env/main.tf + ``` + +1. Update `backend.tf` with your bucket from the infra pipeline output. + + ```bash + export backend_bucket=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-machine-learning"' --raw-output) + echo "backend_bucket = ${backend_bucket}" + + ## Linux + for i in `find . -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done + + ## MacOS + for i in `find . -name 'backend.tf'`; do sed -i "" "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done + ``` + +1. Update `modules/base_env/main.tf` with the name of service catalog project id to complete the git fqdn for module sources: + + ```bash + export service_catalog_project_id=$(terraform -chdir="../gcp-projects/business_unit_3/shared/" output -raw service_catalog_project_id) + + ##LINUX + sed -i "s/SERVICE-CATALOG-PROJECT-ID/${service_catalog_project_id}/" ./modules/base_env/main.tf + + ##MacOS + sed -i "" "s/SERVICE-CATALOG-PROJECT-ID/${service_catalog_project_id}/" ./modules/base_env/main.tf + ``` + +1. Commit changes. + + ```bash + git add . + git commit -m 'Initialize repo' + ``` + +1. Composer will rely on DAG's from a github repository. In `4-projects`, a secret 'github-api-token' was created to house your github's api access key. We need to create a new version for this secret which will be used in the composer module which is called in the `base_env` folder. 
Use the script below to add the secrets into each machine learnings respective environment: + + ```bash + envs=(development non-production production) + project_ids=() + github_token="YOUR-GITHUB-TOKEN" + + for env in "${envs[@]}"; do + output=$(terraform -chdir="../gcp-projects/business_unit_3/${env}" output -raw machine_learning_project_id) + project_ids+=("$output") + done + + for project in "${project_ids[@]}"; do + echo -n $github_token | gcloud secrets versions add github-api-token --data-file=- --project=${project} + done + ``` + +1. Push your plan branch to trigger a plan for all environments. Because the + _plan_ branch is not a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), pushing your _plan_ + branch triggers _terraform plan_ but not _terraform apply_. Review the plan output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID + + ```bash + git push --set-upstream origin plan + ``` + +1. Merge changes to development. Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), + pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID + + ``` + git checkout -b development + git push origin development + ``` + +1. Merge changes to non-production. Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), + pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID + + ```bash + git checkout -b non-production + git push origin non-production + ``` + +1. Merge changes to production branch. 
Because this is a [named environment branch](../docs/FAQ.md#what-is-a-named-branch), + pushing to this branch triggers both _terraform plan_ and _terraform apply_. Review the apply output in your Cloud Build project https://console.cloud.google.com/cloud-build/builds;region=DEFAULT_REGION?project=YOUR_INFRA_PIPELINE_PROJECT_ID + + ```bash + git checkout -b production + git push origin production + ``` + +1. cd out of this directory + + ```bash + cd .. + ``` + +## Running Terraform locally + +1. The next instructions assume that you are at the same level of the `terraform-google-enterprise-genai` folder. Change into `machine-learning-pipeline` folder, copy the Terraform wrapper script and ensure it can be executed. + + ```bash + cd terraform-google-enterprise-genai/examples/machine-learning-pipeline + cp ../../build/tf-wrapper.sh . + chmod 755 ./tf-wrapper.sh + ``` + +1. Rename `common.auto.example.tfvars` files to `common.auto.tfvars`. + + ```bash + mv common.auto.example.tfvars common.auto.tfvars + ``` + +1. Update `common.auto.tfvars` file with values from your environment. + +1. Use `terraform output` to get the project backend bucket value from 0-bootstrap. + + ```bash + export remote_state_bucket=$(terraform -chdir="../../0-bootstrap/" output -raw projects_gcs_bucket_tfstate) + echo "remote_state_bucket = ${remote_state_bucket}" + sed -i "s/REMOTE_STATE_BUCKET/${remote_state_bucket}/" ./common.auto.tfvars + ``` + +1. Provide the user that will be running `./tf-wrapper.sh` the Service Account Token Creator role to the bu3 Terraform service account. + +1. Provide the user permissions to run the terraform locally with the `serviceAccountTokenCreator` permission. 
+ + ```bash + member="user:$(gcloud auth list --filter="status=ACTIVE" --format="value(account)")" + echo ${member} + + project_id=$(terraform -chdir="../../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) + echo ${project_id} + + terraform_sa=$(terraform -chdir="../../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-machine-learning"' --raw-output) + echo ${terraform_sa} + + gcloud iam service-accounts add-iam-policy-binding ${terraform_sa} --project ${project_id} --member="${member}" --role="roles/iam.serviceAccountTokenCreator" + ``` + +1. Update `backend.tf` with your bucket from the infra pipeline output. + + ```bash + export backend_bucket=$(terraform -chdir="../../4-projects/business_unit_3/shared/" output -json state_buckets | jq '."bu3-machine-learning"' --raw-output) + echo "backend_bucket = ${backend_bucket}" + + for i in `find -name 'backend.tf'`; do sed -i "s/UPDATE_APP_INFRA_BUCKET/${backend_bucket}/" $i; done + ``` + +1. Update `modules/base_env/main.tf` with Service Catalog Project Id. + + ```bash + export service_catalog_project_id=$(terraform -chdir="../../4-projects/business_unit_3/shared/" output -raw service_catalog_project_id) + echo "service_catalog_project_id = ${service_catalog_project_id}" + + ## Linux + sed -i "s/SERVICE_CATALOG_PROJECT_ID/${service_catalog_project_id}/g" ./modules/base_env/main.tf + ``` + +We will now deploy each of our environments (development/production/non-production) using this script. +When using Cloud Build or Jenkins as your CI/CD tool, each environment corresponds to a branch in the repository for the `machine-learning-pipeline` step. Only the corresponding environment is applied. + +To use the `validate` option of the `tf-wrapper.sh` script, please follow the [instructions](https://cloud.google.com/docs/terraform/policy-validation/validate-policies#install) to install the terraform-tools component. + +1. 
Use `terraform output` to get the Infra Pipeline Project ID from 4-projects output. + + ```bash + export INFRA_PIPELINE_PROJECT_ID=$(terraform -chdir="../../4-projects/business_unit_3/shared/" output -raw cloudbuild_project_id) + echo ${INFRA_PIPELINE_PROJECT_ID} + + export GOOGLE_IMPERSONATE_SERVICE_ACCOUNT=$(terraform -chdir="../../4-projects/business_unit_3/shared/" output -json terraform_service_accounts | jq '."bu3-machine-learning"' --raw-output) + echo ${GOOGLE_IMPERSONATE_SERVICE_ACCOUNT} + ``` + +1. Run `init` and `plan` and review output for environment production. + + ```bash + ./tf-wrapper.sh init production + ./tf-wrapper.sh plan production + ``` + +1. Run `validate` and check for violations. + + ```bash + ./tf-wrapper.sh validate production $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} + ``` + +1. Run `apply` production. + + ```bash + ./tf-wrapper.sh apply production + ``` + +1. Run `init` and `plan` and review output for environment non-production. + + ```bash + ./tf-wrapper.sh init non-production + ./tf-wrapper.sh plan non-production + ``` + +1. Run `validate` and check for violations. + + ```bash + ./tf-wrapper.sh validate non-production $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} + ``` + +1. Run `apply` non-production. + + ```bash + ./tf-wrapper.sh apply non-production + ``` + +1. Run `init` and `plan` and review output for environment development. + + ```bash + ./tf-wrapper.sh init development + ./tf-wrapper.sh plan development + ``` + +1. Run `validate` and check for violations. + + ```bash + ./tf-wrapper.sh validate development $(pwd)/../policy-library ${INFRA_PIPELINE_PROJECT_ID} + ``` + +1. Run `apply` development. + + ```bash + ./tf-wrapper.sh apply development + ``` + +If you received any errors or made any changes to the Terraform config or `common.auto.tfvars` you must re-run `./tf-wrapper.sh plan ` before running `./tf-wrapper.sh apply `. 
+ +After executing this stage, unset the `GOOGLE_IMPERSONATE_SERVICE_ACCOUNT` environment variable. + + ```bash + unset GOOGLE_IMPERSONATE_SERVICE_ACCOUNT + ``` + +## Post Deployment + +### Big Query + + In order to avoid having to specify a kms key for every query against a bigquery resource, we set the default project encryption key to the corresponding environment key in advance + ```bash + ml_project_dev=$(terraform -chdir="gcp-projects/business_unit_3/development" output -json) + ml_project_nonprd=$(terraform -chdir="gcp-projects/business_unit_3/non-production" output -json) + ml_project_prd=$(terraform -chdir="gcp-projects/business_unit_3/production" output -json) + + projects=( "$ml_project_dev" "$ml_project_nonprd" "$ml_project_prd" ) + + for project in "${projects[@]}"; do + project_id=$(echo "$project" | jq -r '.machine_learning_project_id.value') + project_key=$(echo "$project "| jq -r '.machine_learning_kms_keys.value."us-central1".id') + echo "ALTER PROJECT \`$project_id\` SET OPTIONS (\`region-us-central1.default_kms_key_name\`=\"$project_key\");" | bq query --project_id "$project_id" --nouse_legacy_sql + done + ``` + +### VPC-SC + +1. Now that machine learning's projects have all been inflated, please _return to gcp-projects_ and update COMMON.AUTO.TFVARS with this __additional__ information under `perimeter_additional_members`: + + ``` + "serviceAccount:service-[prj-n-bu3machine-learning-number]@dataflow-service-producer-prod.iam.gserviceaccount.com", + "serviceAccount:[prj-n-bu3machine-learning-number]@cloudbuild.gserviceaccount.com", + "serviceAccount:[prj-n-bu3machine-learning-number]-compute@developer.gserviceaccount.com", + "serviceAccount:[prj-p-bu3machine-learning-number]@cloudbuild.gserviceaccount.com", + "serviceAccount:service-[prj-p-bu3machine-learning-number]@gcp-sa-aiplatform.iam.gserviceaccount.com", + ``` + +2. 
optional - run the below command to generate a list of the above changes needed to COMMON.AUTO.TFVARS + + ```bash + ml_n=$(terraform -chdir="gcp-projects/business_unit_3/non-production" output -raw machine_learning_project_number) + ml_p=$(terraform -chdir="gcp-projects/business_unit_3/production" output -raw machine_learning_project_number) + + echo "serviceAccount:service-${ml_n}@dataflow-service-producer-prod.iam.gserviceaccount.com", + echo "serviceAccount:${ml_n}@cloudbuild.gserviceaccount.com", + echo "serviceAccount:${ml_n}-compute@developer.gserviceaccount.com", + echo "serviceAccount:${ml_p}@cloudbuild.gserviceaccount.com", + echo "serviceAccount:service-${ml_p}@gcp-sa-aiplatform.iam.gserviceaccount.com", + ``` + +1. Many of the necessary service agents and permissions were deployed in all project environments for machine-learning. Additional entries will be needed for each environment. + +1. Add in more agents to the DEVELOPMENT.AUTO.TFVARS file under `egress_policies`. +Notably: + + * "serviceAccount:bq-[prj-d-bu3machine-learning-project-number]@bigquery-encryption.iam.gserviceaccount.com" + + This should be added under identities. It should look like this:: + + ``` + egress_policies = [ + // notebooks + { + "from" = { + "identity_type" = "" + "identities" = [ + "serviceAccount:bq-[prj-d-bu3machine-learning-project-number]@bigquery-encryption.iam.gserviceaccount.com" << New Addition + "serviceAccount:service-[prj-d-bu3machine-learning-project-number]@gcp-sa-notebooks.iam.gserviceaccount.com", + "serviceAccount:service-[prj-d-bu3machine-learning-project-number]@compute-system.iam.gserviceaccount.com", + ] + }, + "to" = { + "resources" = ["projects/[prj-d-kms-project-number]"] + "operations" = { + "compute.googleapis.com" = { + "methods" = ["*"] + } + "cloudkms.googleapis.com" = { + "methods" = ["*"] + } + } + } + }, + ] + ``` + +1. Remain in DEVELOPMENT.AUTO.TFVARS and include this entry under `egress_policies`. 
Ensure you replace all [project numbers] with their corresponding project: + + ``` + // artifact Registry + { + "from" = { + "identity_type" = "" + "identities" = [ + "serviceAccount:service-[prj-d-bu3machine-learning-number]@gcp-sa-aiplatform-cc.iam.gserviceaccount.com", + ] + }, + "to" = { + "resources" = ["projects/[prj-c-bu3artifacts-number]"] + "operations" = { + "artifactregistry.googleapis.com" = { + "methods" = ["*"] + } + } + } + }, + // Dataflow + { + "from" = { + "identity_type" = "" + "identities" = [ + "serviceAccount:service-[prj-n-bu3machine-learning-number]@dataflow-service-producer-prod.iam.gserviceaccount.com", + ] + }, + "to" = { + "resources" = ["projects/[prj-n-bu3machine-learning-number]"] + "operations" = { + "compute.googleapis.com" = { + "methods" = ["*"] + } + } + } + }, + ``` + +1. Under NON-PRODUCTION.AUTO.TFVARS, add these entries under `egress_policies`: + + ``` + { + "from" = { + "identity_type" = "ANY_IDENTITY" + "identities" = [] + }, + "to" = { + "resources" = [ + "projects/[prj-c-bu3artifacts-number]" + ] + "operations" = { + "artifactregistry.googleapis.com" = { + "methods" = ["*"] + } + } + } + }, + // artifact Registry + { + "from" = { + "identity_type" = "" + "identities" = [ + "serviceAccount:service-[prj-n-bu3machine-learning-number]@gcp-sa-aiplatform-cc.iam.gserviceaccount.com", + ] + }, + "to" = { + "resources" = ["projects/[prj-c-bu3artifacts-number]"] + "operations" = { + "artifactregistry.googleapis.com" = { + "methods" = ["*"] + } + } + } + }, + // DataFlow + { + "from" = { + "identity_type" = "" + "identities" = [ + "serviceAccount:service-[prj-n-bu3machine-learning-number]@dataflow-service-producer-prod.iam.gserviceaccount.com", + ] + }, + "to" = { + "resources" = ["projects/[prj-d-shared-restricted-number]"] + "operations" = { + "compute.googleapis.com" = { + "methods" = ["*"] + } + } + } + }, + { + "from" = { + "identity_type" = "" + "identities" = [ + 
"serviceAccount:[prj-n-bu3machine-learning-number]-compute@developer.gserviceaccount.com", + "serviceAccount:service-[prj-d-bu3machine-learning-number]@gcp-sa-aiplatform.iam.gserviceaccount.com", + ] + }, + "to" = { + "resources" = ["projects/[prj-p-bu3machine-learning-number]"] + "operations" = { + "aiplatform.googleapis.com" = { + "methods" = ["*"] + }, + "storage.googleapis.com" = { + "methods" = ["*"] + }, + "bigquery.googleapis.com" = { + "methods" = ["*"] + } + } + } + }, + ``` + +1. Under PRODUCTION.AUTO.TFVARS, add these entries under `egress_policies`: + + ``` + { + "from" = { + "identity_type" = "" + "identities" = [ + "serviceAccount:service-[prj-p-bu3machine-learning-number]@gcp-sa-aiplatform.iam.gserviceaccount.com", + "serviceAccount:service-[prj-p-bu3machine-learning-number]@gcp-sa-aiplatform-cc.iam.gserviceaccount.com", + "serviceAccount:cloud-cicd-artifact-registry-copier@system.gserviceaccount.com", + ] + }, + "to" = { + "resources" = [ + "projects/[prj-n-bu3machine-learning-number]", + "projects/[prj-c-bu3artifacts-number]", + ] + "operations" = { + "artifactregistry.googleapis.com" = { + "methods" = ["*"] + }, + "storage.googleapis.com" = { + "methods" = ["*"] + }, + "bigquery.googleapis.com" = { + "methods" = ["*"] + } + } + } + }, + ``` + +### Service Catalog + +Once you have set up service catalog and attempt to deploy out terraform code, there is a high chance you will encounter this error: +`Permission denied; please check you have the correct IAM permissions and APIs enabled.` +This is due to a VPC Service control error that until now, is impossible to add into the egress policy. Go to `prj-d-bu3machine-learning` project and view the logs, filtering for ERRORS. There will be a VPC Service Controls entry that has an `egressViolation`. 
It should look something like the following:
+```
+egressViolations: [
+    0: {
+        servicePerimeter: "accessPolicies/1066661933618/servicePerimeters/sp_d_shared_restricted_default_perimeter_f3fv"
+        source: "projects/[machine-learning-project-number]"
+        sourceType: "Resource"
+        targetResource: "projects/[unknown-project-number]"
+    }
+]
+```
+
+We want the `unknown-project-number` here. Add this into your `egress_policies` in `3-networks` under DEVELOPMENT.AUTO.TFVARS, NON-PRODUCTION.AUTO.TFVARS & PRODUCTION.AUTO.TFVARS
+
+```
+// Service Catalog
+  {
+    "from" = {
+      "identity_type" = "ANY_IDENTITY"
+      "identities" = []
+    },
+    "to" = {
+      "resources" = ["projects/[unknown-project-number]"]
+      "operations" = {
+        "cloudbuild.googleapis.com" = {
+          "methods" = ["*"]
+        }
+      }
+    }
+  },
+```
+
+### Machine Learning Pipeline
+
+This environment is set up for interactive coding and experimentation. After the project is up, the vertex workbench is deployed from service catalog and the data scientists can use it to write their code, including any experiments, data processing code and pipeline components. In addition, a cloud storage bucket is deployed to use as the storage for our operations. Optionally, a composer environment is deployed, which will later be used to schedule the pipeline run on intervals.
+
+For our pipeline which trains and deploys a model on the [census income dataset](https://archive.ics.uci.edu/dataset/20/census+income), we use a notebook in the dev workbench to create our pipeline components, put them together into a pipeline and do a dry run of the pipeline to make sure there are no issues. You can access the repository [here](https://github.com/GoogleCloudPlatform/terraform-google-enterprise-genai/tree/main/7-vertexpipeline). [^1]
+
+[^1]: There is a Dockerfile in the repo which is the docker image used to run all pipeline steps and cloud build steps.
In non-prod and prod environments, the only NIST compliant way to access additional dependencies and requirements is via docker images uploaded to artifact registry. We have baked everything for running the pipeline into this docker image, which exists in the shared artifact registry.
+
+Once confident, we divide the code in two separate files to use in our CI/CD process in the non-prod environment. First is *compile_pipeline.py*, which includes the code to build the pipeline and compile it into a directory (in our case, /common/vertex-ai-pipeline/pipeline_package.yaml)
+
+The second file, i.e. *runpipeline.py*, includes the code for running the compiled pipeline. This is where the correct environment variables for non-prod and prod (e.g., service accounts to use for each stage of the pipeline, kms keys corresponding to each step, buckets, etc.) are set. And eventually the pipeline is loaded from the yaml file at *common/vertex-ai-pipeline/pipeline_package.yaml* and submitted to vertex ai.
+
+
+There is a *cloudbuild.yaml* file in the repo with the CI/CD steps as follows:
+
+1. Upload the Dataflow src file to the bucket in non-prod
+2. Upload the dataset to the bucket
+3. Run *compile_pipeline.py* to compile the pipeline
+4. Run the pipeline via *runpipeline.py*
+5. Optionally, upload the pipeline's yaml file to the composer bucket to make it available for scheduled pipeline runs
+
+The cloud build trigger will be set up in the non-prod project, which is where the ML pipeline will run. There are currently three branches on the repo, namely dev, staging (non-prod), and prod. Cloud build will trigger the pipeline once there is a merge into the staging (non-prod) branch from dev. However, model deployment and monitoring steps take place in the prod environment. As a result, the service agents and service accounts of the non-prod environment are given some permissions on the prod environment and vice versa.
+
+Each time a pipeline job finishes successfully, a new version of the census income bracket predictor model will be deployed on the endpoint, which will only take 25 percent of the traffic, whereas the other 75 percent goes to the previous version of the model to enable A/B testing.
+
+You can read more about the details of the pipeline components on the [pipeline's repo](https://github.com/GoogleCloudPlatform/terraform-google-enterprise-genai/tree/main/7-vertexpipeline#readme)
+
+### Step by step
+
+Before you start, make sure you have your personal git access token ready. The git menu option on the left bar of the workbench requires the personal token to connect to git and clone the repo.
+Also make sure to have a gcs bucket ready to store the artifacts for the tutorial. To deploy the bucket, you can go to service catalog and create a new deployment from the storage bucket solution.
+
+#### 1. Run the notebook
+
+- Take the 7-vertexpipeline folder and make your own copy as a standalone git repository and clone it in the workbench in your dev project. Create a dev branch of the new repository. Switch to the dev branch by choosing it in the branch section of the git view. Now go back to the file browser view by clicking the first option on the left bar menu. Navigate to the directory you just cloned and run [the notebook](https://github.com/GoogleCloudPlatform/terraform-google-enterprise-genai/blob/main/7-vertexpipeline/census_pipeline.ipynb) cell by cell. Pay attention to the instructions and comments in the notebook and don't forget to set the correct values corresponding to your dev project.
+
+#### 2. Configure cloud build
+
+- After the notebook runs successfully and the pipeline's test run finishes in the dev environment, create a cloud build trigger in your non-prod project. Configure the trigger to run when there is a merge into the staging (non-prod) branch by following the below settings.
+ + |Setting|Value| + |-------|-----| + |Event|push to branch| + |Repository generation|1st gen| + |Repository|the url to your fork of the repo| + |Branch|staging| + |Configuration|Autodetected/Cloud Build configuration file (yaml or json)| + |Location|Repository| + |Cloud Build configuration file location|cloudbuild.yaml| + + +- Open the cloudbuild.yaml file in your workbench and for steps 1 which uploads the source code for the dataflow job to your bucket. + + ``` + name: 'gcr.io/cloud-builders/gsutil' + args: ['cp', '-r', './src', 'gs://{your-bucket-name}'] + ``` + +- Similarly in step 2, replace the bucket name with the name of your own bucket in the non-prod project in order to upload the data to your bucket: + ``` + name: 'gcr.io/cloud-builders/gsutil' + args: ['cp', '-r', './data', 'gs://{your-bucket-name}'] + ``` + +- Change the name of the image for step 3 and 4 to that of your own artifact project, i.e., `us-central1-docker.pkg.dev/{artifact_project_id}/c-publish-artifacts/vertexpipeline:v2` This is the project with artifact registry that houses the image required to run the pipeline. + +``` + - name: 'us-central1-docker.pkg.dev/{your-artifact-project}/c-publish-artifacts/vertexpipeline:v2' + entrypoint: 'python' + args: ['compile_pipeline.py'] + id: 'compile_job' + + # run pipeline + - name: 'us-central1-docker.pkg.dev/{your-artifact-project}/c-publish-artifacts/vertexpipeline:v2' + entrypoint: 'python' + args: ['runpipeline.py'] + id: 'run_job' + waitFor: ['compile_job'] +``` + +- Optionally, if you want to schedule pipeline runs on regular intervals, uncomment the last two steps and replace the composer bucket with the name of your composer's bucket. 
The first step uploads the pipeline's yaml to the bucket and the second step uploads the dag to read that yaml and trigger the vertex pipeline: +``` + # upload to composer + - name: 'gcr.io/cloud-builders/gsutil' + args: ['cp', './common/vertex-ai-pipeline/pipeline_package.yaml', 'gs://{your-composer-bucket}/dags/common/vertex-ai-pipeline/'] + id: 'upload_composer_file' + + # upload pipeline dag to composer + - name: 'gcr.io/cloud-builders/gsutil' + args: ['cp', './composer/dags/dag.py', 'gs://{your-composer-bucket}/dags/'] + id: 'upload dag' +``` + +#### 3. Configure variables in compile_pipeline.py and runpipeline.py + +- Make sure to set the correct values for variables like **PROJECT_ID**, **BUCKET_URI**, encryption keys and service accounts, etc.: + + |variable|definition|example value|How to obtain| + |--------|----------|-------------|-------------| + |PROJECT_ID|The id of the non-prod project|`{none-prod-project-id}`|From the project's menu in console navigate to the `fldr-non-production/fldr-non-production-bu3` folder; here you can find the machine learning project in non-prod (`prj-n-bu3machine-learning`) and obtain its' ID| + |BUCKET_URI|URI of the non-prod bucket|`gs://non-prod-bucket`|From the project menu in console navigate to the non-prod ML project `fldr-non-production/fldr-non-production-bu3/prj-n-bu3machine-learning` project, navigate to cloud storage and copy the name of the bucket available there| + |REGION|The region for pipeline jobs|Can be left as default `us-central1`| + |PROD_PROJECT_ID|ID of the prod project|`prod-project-id`|In console's project menu, navigate to the `fldr-production/fldr-production-bu3` folder; here you can find the machine learning project in prod (`prj-p-bu3machine-learning`) and obtain its' ID| + |Image|The image artifact used to run the pipeline components. 
The image is already built and pushed to the artifact repository in your artifact project under the common folder|`f"us-central1-docker.pkg.dev/{{artifact-project}}/{{artifact-repository}}/vertexpipeline:v2"`|Navigate to `fldr-common/prj-c-bu3artifacts` project. Navigate to the artifact registry repositories in the project to find the full name of the image artifact.| + |DATAFLOW_SUBNET|The shared subnet in non-prod env required to run the dataflow job|`https://www.googleapis.com/compute/v1/projects/{non-prod-network-project}/regions/us-central1/subnetworks/{subnetwork-name}`|Navigate to the `fldr-network/prj-n-shared-restricted` project. Navigate to the VPC networks and under the subnets tab, find the name of the network associated with your region (us-central1)| + |SERVICE_ACCOUNT|The service account used to run the pipeline and it's components such as the model monitoring job. This is the compute default service account of non-prod if you don't plan on using another costume service account|`{non-prod-project_number}-compute@developer.gserviceaccount.com`|Head over to the IAM page in the non-prod project `fldr-non-production/fldr-non-production-bu3/prj-n-bu3machine-learning`, check the box for `Include Google-provided role grants` and look for the service account with the `{project_number}-compute@developer.gserviceaccount.com`| + |PROD_SERICE_ACCOUNT|The service account used to create endpoint, upload the model, and deploy the model in the prod project. This is the compute default service account of prod if you don't plan on using another costume service account|`{prod-project_number}-compute@developer.gserviceaccount.com`|Head over to the IAM page in the prod project `fldr-production/fldr-production-bu3/prj-p-bu3machine-learning`, check the box for `Include Google-provided role grants` and look for the service account with the `{project_number}-compute@developer.gserviceaccount.com`| + |deployment_config['encryption']|The kms key for the prod env. 
This key is used to encrypt the vertex model, endpoint, model deployment, and model monitoring.|`projects/{prod-kms-project}/locations/us-central1/keyRings/{keyring-name}/cryptoKeys/{key-name}`|Navigate to `fldr-production/prj-n-kms`, navigate to the Security/Key management in that project to find the key in `sample-keyring` keyring of your target region `us-central1`| + |encryption_spec_key_name|The name of the encryption key for the non-prod env. This key is used to create the vertex pipeline job and it's associated metadata store|`projects/{non-prod-kms-project}/locations/us-central1/keyRings/{keyring-name}/cryptoKeys/{key-name}`|Navigate to `fldr-non-production/prj-n-kms`, navigate to the Security/Key management in that project to find the key in `sample-keyring` keyring of your target region `us-central1`| + |monitoring_config['email']|The email that Vertex AI monitoring will email alerts to|`your email`|your email associated with your gcp account| + +The compile_pipeline.py and runpipeline.py files are commented to point out these variables. + +#### 4. Merge and deploy + +- Once everything is configured, you can commit your changes and push to the dev branch. Then, create a PR to from dev to staging(non-prod) which will result in triggering the pipeline if approved. The vertex pipeline takes about 30 minutes to finish and if there are no errors, a trained model will be deployed to and endpoint in the prod project which you can use to make prediction requests. + +### 5. Model Validation + +Once you have the model running at an endpoint in the production project, you will be able to test it. +Here are step-by-step instructions to make a request to your model using `gcloud` and `curl`: + +1. 
Initialize variables on your terminal session + + ```bash + ENDPOINT_ID= + PROJECT_ID= + INPUT_DATA_FILE="body.json" + ``` + + > You can retrieve your ENDPOINT_ID by running `gcloud ai endpoints list --region=us-central1 --project=` or by navigating to it on the Google Cloud Console (https://console.cloud.google.com/vertex-ai/online-prediction/endpoints?project=) + +2. Create a file named `body.json` and put some sample data into it: + + ```json + { + "instances": [ + { + "features/gender": "Female", + "features/workclass": "Private", + "features/occupation": "Tech-support", + "features/marital_status": "Married-civ-spouse", + "features/race": "White", + "features/capital_gain": 0, + "features/education": "9th", + "features/age": 33, + "features/hours_per_week": 40, + "features/relationship": "Wife", + "features/native_country": "Canada", + "features/capital_loss": 0 + } + ] + } + ``` + +3. Run a curl request using the `body.json` file as the JSON Body. + + ```bash + curl -X POST -H "Authorization: Bearer $(gcloud auth print-access-token)" \ + -H "Content-Type: application/json" \ + https://us-central1-aiplatform.googleapis.com/v1/projects/${PROJECT_ID}/locations/us-central1/endpoints/${ENDPOINT_ID}:predict -d "@${INPUT_DATA_FILE}" + ``` + + - You should get an output from 0 to 1, indicating the level of confidence of the binary classification based on the parameters above. + Values closer to 1 mean the individual is more likely to be included in the income_bracket greater than 50K. + +#### Common errors + +- ***google.api_core.exceptions.ResourceExhausted: 429 The following quotas are exceeded: ```CustomModelServingCPUsPerProjectPerRegion 8: The following quotas are exceeded: CustomModelServingCPUsPerProjectPerRegion``` or similar error***: +This is likely due to the fact that you have too many models uploaded and deployed in Vertex AI. 
To resolve the issue, you can either submit a quota increase request or undeploy and delete a few models to free up resources + +- ***Google Compute Engine Metadata service not available/found***: +You might encounter this when the vertex pipeline job attempts to run even though it is an obsolete issue according to [this thread](https://issuetracker.google.com/issues/229537245#comment9). It'll most likely resolve by re-running the vertex pipeline diff --git a/5-app-infra/6-machine-learning/business_unit_3/development/README.md b/examples/machine-learning-pipeline/business_unit_3/development/README.md similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/development/README.md rename to examples/machine-learning-pipeline/business_unit_3/development/README.md diff --git a/5-app-infra/6-machine-learning/business_unit_3/development/backend.tf b/examples/machine-learning-pipeline/business_unit_3/development/backend.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/development/backend.tf rename to examples/machine-learning-pipeline/business_unit_3/development/backend.tf diff --git a/5-app-infra/1-artifact-publish/business_unit_3/shared/common.auto.tfvars b/examples/machine-learning-pipeline/business_unit_3/development/common.auto.tfvars similarity index 100% rename from 5-app-infra/1-artifact-publish/business_unit_3/shared/common.auto.tfvars rename to examples/machine-learning-pipeline/business_unit_3/development/common.auto.tfvars diff --git a/5-app-infra/6-machine-learning/business_unit_3/development/locals.tf b/examples/machine-learning-pipeline/business_unit_3/development/locals.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/development/locals.tf rename to examples/machine-learning-pipeline/business_unit_3/development/locals.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/development/main.tf 
b/examples/machine-learning-pipeline/business_unit_3/development/main.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/development/main.tf rename to examples/machine-learning-pipeline/business_unit_3/development/main.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/development/outputs.tf b/examples/machine-learning-pipeline/business_unit_3/development/outputs.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/development/outputs.tf rename to examples/machine-learning-pipeline/business_unit_3/development/outputs.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/development/remote.tf b/examples/machine-learning-pipeline/business_unit_3/development/remote.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/development/remote.tf rename to examples/machine-learning-pipeline/business_unit_3/development/remote.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/development/variables.tf b/examples/machine-learning-pipeline/business_unit_3/development/variables.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/development/variables.tf rename to examples/machine-learning-pipeline/business_unit_3/development/variables.tf diff --git a/5-app-infra/1-artifact-publish/business_unit_3/shared/versions.tf b/examples/machine-learning-pipeline/business_unit_3/development/versions.tf similarity index 100% rename from 5-app-infra/1-artifact-publish/business_unit_3/shared/versions.tf rename to examples/machine-learning-pipeline/business_unit_3/development/versions.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/non-production/README.md b/examples/machine-learning-pipeline/business_unit_3/non-production/README.md similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/non-production/README.md rename to examples/machine-learning-pipeline/business_unit_3/non-production/README.md 
diff --git a/5-app-infra/6-machine-learning/business_unit_3/non-production/backend.tf b/examples/machine-learning-pipeline/business_unit_3/non-production/backend.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/non-production/backend.tf rename to examples/machine-learning-pipeline/business_unit_3/non-production/backend.tf diff --git a/5-app-infra/3-service-catalog/business_unit_3/shared/common.auto.tfvars b/examples/machine-learning-pipeline/business_unit_3/non-production/common.auto.tfvars similarity index 100% rename from 5-app-infra/3-service-catalog/business_unit_3/shared/common.auto.tfvars rename to examples/machine-learning-pipeline/business_unit_3/non-production/common.auto.tfvars diff --git a/5-app-infra/6-machine-learning/business_unit_3/non-production/locals.tf b/examples/machine-learning-pipeline/business_unit_3/non-production/locals.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/non-production/locals.tf rename to examples/machine-learning-pipeline/business_unit_3/non-production/locals.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/non-production/main.tf b/examples/machine-learning-pipeline/business_unit_3/non-production/main.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/non-production/main.tf rename to examples/machine-learning-pipeline/business_unit_3/non-production/main.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/non-production/outputs.tf b/examples/machine-learning-pipeline/business_unit_3/non-production/outputs.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/non-production/outputs.tf rename to examples/machine-learning-pipeline/business_unit_3/non-production/outputs.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/non-production/remote.tf b/examples/machine-learning-pipeline/business_unit_3/non-production/remote.tf similarity index 100% rename from 
5-app-infra/6-machine-learning/business_unit_3/non-production/remote.tf rename to examples/machine-learning-pipeline/business_unit_3/non-production/remote.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/non-production/variables.tf b/examples/machine-learning-pipeline/business_unit_3/non-production/variables.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/non-production/variables.tf rename to examples/machine-learning-pipeline/business_unit_3/non-production/variables.tf diff --git a/5-app-infra/3-service-catalog/business_unit_3/shared/versions.tf b/examples/machine-learning-pipeline/business_unit_3/non-production/versions.tf similarity index 100% rename from 5-app-infra/3-service-catalog/business_unit_3/shared/versions.tf rename to examples/machine-learning-pipeline/business_unit_3/non-production/versions.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/production/README.md b/examples/machine-learning-pipeline/business_unit_3/production/README.md similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/production/README.md rename to examples/machine-learning-pipeline/business_unit_3/production/README.md diff --git a/5-app-infra/6-machine-learning/business_unit_3/production/backend.tf b/examples/machine-learning-pipeline/business_unit_3/production/backend.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/production/backend.tf rename to examples/machine-learning-pipeline/business_unit_3/production/backend.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/development/common.auto.tfvars b/examples/machine-learning-pipeline/business_unit_3/production/common.auto.tfvars similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/development/common.auto.tfvars rename to examples/machine-learning-pipeline/business_unit_3/production/common.auto.tfvars diff --git 
a/5-app-infra/6-machine-learning/business_unit_3/production/locals.tf b/examples/machine-learning-pipeline/business_unit_3/production/locals.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/production/locals.tf rename to examples/machine-learning-pipeline/business_unit_3/production/locals.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/production/main.tf b/examples/machine-learning-pipeline/business_unit_3/production/main.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/production/main.tf rename to examples/machine-learning-pipeline/business_unit_3/production/main.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/production/outputs.tf b/examples/machine-learning-pipeline/business_unit_3/production/outputs.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/production/outputs.tf rename to examples/machine-learning-pipeline/business_unit_3/production/outputs.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/production/remote.tf b/examples/machine-learning-pipeline/business_unit_3/production/remote.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/production/remote.tf rename to examples/machine-learning-pipeline/business_unit_3/production/remote.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/production/variables.tf b/examples/machine-learning-pipeline/business_unit_3/production/variables.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/production/variables.tf rename to examples/machine-learning-pipeline/business_unit_3/production/variables.tf diff --git a/5-app-infra/6-machine-learning/business_unit_3/development/versions.tf b/examples/machine-learning-pipeline/business_unit_3/production/versions.tf similarity index 100% rename from 5-app-infra/6-machine-learning/business_unit_3/development/versions.tf rename to 
examples/machine-learning-pipeline/business_unit_3/production/versions.tf diff --git a/5-app-infra/6-machine-learning/common.auto.example.tfvars b/examples/machine-learning-pipeline/common.auto.example.tfvars similarity index 100% rename from 5-app-infra/6-machine-learning/common.auto.example.tfvars rename to examples/machine-learning-pipeline/common.auto.example.tfvars diff --git a/5-app-infra/6-machine-learning/modules/base_env/data.tf b/examples/machine-learning-pipeline/modules/base_env/data.tf similarity index 100% rename from 5-app-infra/6-machine-learning/modules/base_env/data.tf rename to examples/machine-learning-pipeline/modules/base_env/data.tf diff --git a/5-app-infra/6-machine-learning/modules/base_env/iam.tf b/examples/machine-learning-pipeline/modules/base_env/iam.tf similarity index 100% rename from 5-app-infra/6-machine-learning/modules/base_env/iam.tf rename to examples/machine-learning-pipeline/modules/base_env/iam.tf diff --git a/5-app-infra/6-machine-learning/modules/base_env/main.tf b/examples/machine-learning-pipeline/modules/base_env/main.tf similarity index 100% rename from 5-app-infra/6-machine-learning/modules/base_env/main.tf rename to examples/machine-learning-pipeline/modules/base_env/main.tf diff --git a/5-app-infra/6-machine-learning/modules/base_env/outputs.tf b/examples/machine-learning-pipeline/modules/base_env/outputs.tf similarity index 100% rename from 5-app-infra/6-machine-learning/modules/base_env/outputs.tf rename to examples/machine-learning-pipeline/modules/base_env/outputs.tf diff --git a/5-app-infra/6-machine-learning/modules/base_env/roles.tf b/examples/machine-learning-pipeline/modules/base_env/roles.tf similarity index 100% rename from 5-app-infra/6-machine-learning/modules/base_env/roles.tf rename to examples/machine-learning-pipeline/modules/base_env/roles.tf diff --git a/5-app-infra/6-machine-learning/modules/base_env/variables.tf b/examples/machine-learning-pipeline/modules/base_env/variables.tf similarity index 
100% rename from 5-app-infra/6-machine-learning/modules/base_env/variables.tf rename to examples/machine-learning-pipeline/modules/base_env/variables.tf