[CES-157] - Implement tooling for storage account migration from WEU to ITN #1238

Open: wants to merge 31 commits into main (diff shows changes from 20 of the 31 commits)

Commits
822e19d
Added test code for the Azure Data Factory module
ccuffari Oct 10, 2024
2e1af1d
File folder structure check
ccuffari Oct 11, 2024
5bb5809
Update italynorth.tf
ccuffari Oct 15, 2024
dab8db1
feat: started refactoring
christian-calabrese Oct 15, 2024
4e11cf8
chore: what_to_migrate variable added
christian-calabrese Oct 15, 2024
7760c8f
feat: added iam and fixed pipelines
christian-calabrese Oct 16, 2024
f9aa68d
feat: added missing datasets and checks
christian-calabrese Oct 16, 2024
9d2c460
fix: changed folder structure and ran pre-commit
christian-calabrese Oct 16, 2024
0c4c0a8
feat: added the example list
christian-calabrese Oct 16, 2024
2e0da2f
fix: errors and tested
christian-calabrese Oct 16, 2024
0a92fb2
Merge branch 'main' into bip-dev
christian-calabrese Oct 16, 2024
a28f762
chore: ran pre-commit
christian-calabrese Oct 16, 2024
7f81909
feat: using instance number from local
christian-calabrese Oct 16, 2024
09d17e6
feat: adf public_network_enabled set to false
christian-calabrese Oct 16, 2024
7200a5a
feat: added managed virtual network support to adf
christian-calabrese Oct 16, 2024
59cac9f
fix: changed from jsonsource to binarysource
christian-calabrese Oct 16, 2024
5ea38bf
Merge branch 'main' into bip-dev
christian-calabrese Oct 16, 2024
0e714ca
fix: references to resources
christian-calabrese Oct 17, 2024
041e94b
feat: subdivide datafactory resources in folders
christian-calabrese Oct 17, 2024
088d468
feat: substitute dollar sign with underscore in adf resource names
christian-calabrese Oct 17, 2024
1190c2b
fix: re added state configuration
christian-calabrese Oct 17, 2024
f43ca19
fix: comment
christian-calabrese Oct 17, 2024
3f68237
fix: deleted test storage accounts
christian-calabrese Oct 17, 2024
02bc83b
fix: updated lock with platforms
mamu0 Oct 17, 2024
2521718
Merge branch 'main' into bip-dev
christian-calabrese Oct 21, 2024
f5edf0c
Merge branch 'main' into bip-dev
christian-calabrese Oct 22, 2024
ec80614
Merge branch 'main' into bip-dev
christian-calabrese Oct 28, 2024
f0e366a
Merge branch 'main' into bip-dev
christian-calabrese Oct 29, 2024
c9b8318
Merge branch 'main' into bip-dev
christian-calabrese Oct 29, 2024
d241f7d
Merge branch 'main' into bip-dev
christian-calabrese Nov 4, 2024
0f1374b
Merge branch 'main' into bip-dev
christian-calabrese Nov 5, 2024
20 changes: 20 additions & 0 deletions src/_modules/data_factory_storage_account/data.tf
@@ -0,0 +1,20 @@
data "azurerm_storage_account" "source" {
name = var.storage_accounts.source.name
resource_group_name = var.storage_accounts.source.resource_group_name
}

data "azurerm_storage_account" "target" {
name = var.storage_accounts.target.name
resource_group_name = var.storage_accounts.target.resource_group_name
}

data "azurerm_storage_containers" "this" {
count = var.what_to_migrate.blob.enabled && length(var.what_to_migrate.blob.containers) == 0 ? 1 : 0
storage_account_id = data.azurerm_storage_account.source.id
}

data "azapi_resource_list" "tables" {
type = "Microsoft.Storage/storageAccounts/tableServices/tables@2021-09-01"
parent_id = "${data.azurerm_storage_account.source.id}/tableServices/default"
response_export_values = ["*"]
}
@@ -0,0 +1,41 @@
resource "azurerm_data_factory_custom_dataset" "source_dataset_container" {
for_each = toset(local.containers)
name = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value}-blob-${module.naming_convention.suffix}", "/[$-]/", "_")
data_factory_id = var.data_factory_id
type = "AzureBlob"
folder = "${var.storage_accounts.source.name}/source/blob"

linked_service {
name = azurerm_data_factory_linked_service_azure_blob_storage.source_linked_service_blob[0].name
}

type_properties_json = jsonencode({
linkedServiceName = {
referenceName = azurerm_data_factory_linked_service_azure_blob_storage.source_linked_service_blob[0].name
type = "LinkedServiceReference"
}
type = "AzureBlob"
folderPath = each.value
})
}

resource "azurerm_data_factory_custom_dataset" "target_dataset_container" {
for_each = toset(local.containers)
name = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.target.name}-${each.value}-blob-${module.naming_convention.suffix}", "/[$-]/", "_")
data_factory_id = var.data_factory_id
type = "AzureBlob"
folder = "${var.storage_accounts.source.name}/target/blob"

linked_service {
name = azurerm_data_factory_linked_service_azure_blob_storage.target_linked_service_blob[0].name
}

type_properties_json = jsonencode({
linkedServiceName = {
referenceName = azurerm_data_factory_linked_service_azure_blob_storage.target_linked_service_blob[0].name
type = "LinkedServiceReference"
}
type = "AzureBlob"
folderPath = each.value
})
}
30 changes: 30 additions & 0 deletions src/_modules/data_factory_storage_account/datasets_tables.tf
@@ -0,0 +1,30 @@
resource "azurerm_data_factory_custom_dataset" "source_dataset_table" {
for_each = toset(local.tables)
name = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value}-table-${module.naming_convention.suffix}", "/[$-]/", "_")
data_factory_id = var.data_factory_id
type = "AzureTable"
folder = "${var.storage_accounts.source.name}/source/table"

linked_service {
name = azurerm_data_factory_linked_service_azure_table_storage.source_linked_service_table[0].name
}

type_properties_json = jsonencode({
tableName = each.value
})
}

resource "azurerm_data_factory_custom_dataset" "target_dataset_table" {
for_each = toset(local.tables)
name = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.target.name}-${each.value}-table-${module.naming_convention.suffix}", "/[$-]/", "_")
data_factory_id = var.data_factory_id
type = "AzureTable"
folder = "${var.storage_accounts.source.name}/target/table"
linked_service {
name = azurerm_data_factory_linked_service_azure_table_storage.target_linked_service_table[0].name
}

type_properties_json = jsonencode({
tableName = each.value
})
}
40 changes: 40 additions & 0 deletions src/_modules/data_factory_storage_account/iam.tf
@@ -0,0 +1,40 @@
module "roles" {
source = "github.com/pagopa/dx//infra/modules/azure_role_assignments?ref=main"
principal_id = var.data_factory_principal_id

storage_blob = var.what_to_migrate.blob.enabled ? [
{
storage_account_name = var.storage_accounts.source.name
resource_group_name = var.storage_accounts.source.resource_group_name
role = "reader"
},
{
storage_account_name = var.storage_accounts.target.name
resource_group_name = var.storage_accounts.target.resource_group_name
role = "writer"
}
] : []

# ADF terraform resources still force to use connection strings for tables
# but it's possible to switch to managed identities from the portal
storage_table = var.what_to_migrate.table.enabled ? [
{
storage_account_name = var.storage_accounts.source.name
resource_group_name = var.storage_accounts.source.resource_group_name
role = "reader"
},
{
storage_account_name = var.storage_accounts.target.name
resource_group_name = var.storage_accounts.target.resource_group_name
role = "writer"
}
] : []
}

# add to var.data_factory_principal_id the Storage Account Contributor role using the azurerm_role_assignment resourcew
resource "azurerm_role_assignment" "storage_account_contributor" {
count = var.what_to_migrate.table.enabled ? 1 : 0
scope = data.azurerm_storage_account.target.id
role_definition_name = "Storage Account Contributor"
principal_id = var.data_factory_principal_id
}
@@ -0,0 +1,19 @@
resource "azurerm_data_factory_linked_service_azure_blob_storage" "source_linked_service_blob" {
count = var.what_to_migrate.blob.enabled ? 1 : 0
name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-blob-${module.naming_convention.suffix}"
data_factory_id = var.data_factory_id

service_endpoint = "https://${data.azurerm_storage_account.source.name}.blob.core.windows.net"

use_managed_identity = true
}

resource "azurerm_data_factory_linked_service_azure_blob_storage" "target_linked_service_blob" {
count = var.what_to_migrate.blob.enabled ? 1 : 0
name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.target.name}-blob-${module.naming_convention.suffix}"
data_factory_id = var.data_factory_id

service_endpoint = "https://${data.azurerm_storage_account.target.name}.blob.core.windows.net"

use_managed_identity = true
}
@@ -0,0 +1,15 @@
resource "azurerm_data_factory_linked_service_azure_table_storage" "source_linked_service_table" {
count = var.what_to_migrate.table.enabled ? 1 : 0
name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-table-${module.naming_convention.suffix}"
data_factory_id = var.data_factory_id

connection_string = data.azurerm_storage_account.source.primary_connection_string
}

resource "azurerm_data_factory_linked_service_azure_table_storage" "target_linked_service_table" {
count = var.what_to_migrate.table.enabled ? 1 : 0
name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.target.name}-table-${module.naming_convention.suffix}"
data_factory_id = var.data_factory_id

connection_string = data.azurerm_storage_account.target.primary_connection_string
}
6 changes: 6 additions & 0 deletions src/_modules/data_factory_storage_account/locals.tf
@@ -0,0 +1,6 @@
locals {
  containers = var.what_to_migrate.blob.enabled ? (
    length(var.what_to_migrate.blob.containers) > 0
    ? var.what_to_migrate.blob.containers
    : [for container in data.azurerm_storage_containers.this[0].containers : container.name]
  ) : []

  azapi_tables = jsondecode(data.azapi_resource_list.tables.output)
  tables = var.what_to_migrate.table.enabled ? (
    length(var.what_to_migrate.table.tables) > 0
    ? var.what_to_migrate.table.tables
    : [for table in local.azapi_tables.value : table.name]
  ) : []
}
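
For reference, the locals above assume a what_to_migrate variable shaped roughly as follows. This is only a sketch: the module's variables.tf is not part of this diff, so the optional() defaults and the description are assumptions.

variable "what_to_migrate" {
  type = object({
    blob = object({
      enabled    = bool
      containers = optional(list(string), []) # empty list means "migrate every container" (assumed default)
    })
    table = object({
      enabled = bool
      tables  = optional(list(string), []) # empty list means "migrate every table" (assumed default)
    })
  })
  description = "Which storage services to migrate and, optionally, which containers/tables to restrict the copy to."
}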
21 changes: 21 additions & 0 deletions src/_modules/data_factory_storage_account/main.tf
@@ -0,0 +1,21 @@
terraform {
  required_providers {
    azapi = {
      source  = "Azure/azapi"
      version = "<= 1.15.0"
    }
  }
}

module "naming_convention" {
  source = "github.com/pagopa/dx//infra/modules/azure_naming_convention/?ref=main"

  environment = {
    prefix          = var.environment.prefix
    env_short       = var.environment.env_short
    location        = var.environment.location
    domain          = var.environment.domain
    app_name        = var.environment.app_name
    instance_number = var.environment.instance_number
  }
}
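
A minimal sketch of how this module might be instantiated from an environment root. Every literal value, the module label, and the relative source path below are illustrative assumptions; only the input names come from this diff.

module "weu_to_itn_migration" {
  source = "../_modules/data_factory_storage_account" # hypothetical path

  environment = {
    prefix          = "io" # hypothetical values throughout this block
    env_short       = "p"
    location        = "italynorth"
    domain          = "ces"
    app_name        = "migration"
    instance_number = "01"
  }

  data_factory_id           = azurerm_data_factory.this.id # assumed root-level factory resource
  data_factory_principal_id = azurerm_data_factory.this.identity[0].principal_id

  storage_accounts = {
    source = { name = "stweuexample", resource_group_name = "rg-weu-example" } # hypothetical
    target = { name = "stitnexample", resource_group_name = "rg-itn-example" } # hypothetical
  }

  what_to_migrate = {
    blob  = { enabled = true, containers = [] } # empty list selects all containers
    table = { enabled = true, tables = [] }     # empty list selects all tables
  }
}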
31 changes: 31 additions & 0 deletions src/_modules/data_factory_storage_account/network.tf
@@ -0,0 +1,31 @@
resource "azurerm_data_factory_managed_private_endpoint" "blob_source" {
count = var.what_to_migrate.blob.enabled ? 1 : 0
name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-blob-${module.naming_convention.suffix}"
data_factory_id = var.data_factory_id
target_resource_id = data.azurerm_storage_account.source.id
subresource_name = "blob"
}

resource "azurerm_data_factory_managed_private_endpoint" "blob_target" {
count = var.what_to_migrate.blob.enabled ? 1 : 0
name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.target.name}-blob-${module.naming_convention.suffix}"
data_factory_id = var.data_factory_id
target_resource_id = data.azurerm_storage_account.target.id
subresource_name = "blob"
}

resource "azurerm_data_factory_managed_private_endpoint" "table_source" {
count = var.what_to_migrate.table.enabled ? 1 : 0
name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-table-${module.naming_convention.suffix}"
data_factory_id = var.data_factory_id
target_resource_id = data.azurerm_storage_account.source.id
subresource_name = "table"
}

resource "azurerm_data_factory_managed_private_endpoint" "table_target" {
count = var.what_to_migrate.table.enabled ? 1 : 0
name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.target.name}-table-${module.naming_convention.suffix}"
data_factory_id = var.data_factory_id
target_resource_id = data.azurerm_storage_account.target.id
subresource_name = "table"
}
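
Managed private endpoints like these only resolve when the factory's integration runtime runs inside a managed virtual network, which matches the commits "feat: added managed virtual network support to adf" and "feat: adf public_network_enabled set to false". The factory itself is defined outside this diff; a sketch of the settings it would plausibly carry (names and values are assumptions):

resource "azurerm_data_factory" "this" {
  name                = "example-adf"    # hypothetical name
  location            = "italynorth"     # hypothetical
  resource_group_name = "rg-itn-example" # hypothetical

  managed_virtual_network_enabled = true  # required for azurerm_data_factory_managed_private_endpoint
  public_network_enabled          = false # per commit 09d17e6

  identity {
    type = "SystemAssigned" # would supply the principal granted roles in iam.tf
  }
}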
10 changes: 10 additions & 0 deletions src/_modules/data_factory_storage_account/outputs.tf
@@ -0,0 +1,10 @@
output "pipelines" {
value = {
for pipeline in merge(azurerm_data_factory_pipeline.pipeline_container, azurerm_data_factory_pipeline.pipeline_table)
: pipeline.name => {
id = pipeline.id
name = pipeline.name
url = "https://adf.azure.com/en/authoring/pipeline/${pipeline.name}?factory=${pipeline.data_factory_id}"
}
}
}
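
Since the output is a map keyed by pipeline name, a root module can surface the authoring URLs directly. A sketch, reusing the hypothetical module label from the earlier example:

output "migration_pipeline_urls" {
  value = { for name, pipeline in module.weu_to_itn_migration.pipelines : name => pipeline.url }
}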
@@ -0,0 +1,60 @@
resource "azurerm_data_factory_pipeline" "pipeline_container" {
for_each = toset(local.containers)
name = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value}-blob-${module.naming_convention.suffix}", "/[$-]/", "_")
data_factory_id = var.data_factory_id
folder = "${var.storage_accounts.source.name}/blob"

activities_json = jsonencode(
[
{
name = "CopyActivity"
type = "Copy"
dependsOn = []
policy = {
timeout = "0.12:00:00"
retry = 0
retryIntervalInSeconds = 30
secureOutput = false
secureInput = false
}
userProperties = []
typeProperties = {
source = {
type = "BinarySource"
storeSettings = {
type = "AzureBlobStorageReadSettings"
recursive = true
enablePartitionDiscovery = false
wildcardFileName = "*" # Copy all files
}
formatSettings = {
type = ""
}
}
sink = {
type = "BinarySink"
storeSettings = {
type = "AzureBlobStorageWriteSettings"
}
formatSettings = {
type = ""
}
}
enableStaging = false
}
inputs = [
{
referenceName = azurerm_data_factory_custom_dataset.source_dataset_container[each.value].name
type = "DatasetReference"
}
]
outputs = [
{
referenceName = azurerm_data_factory_custom_dataset.target_dataset_container[each.value].name
type = "DatasetReference"
}
]
}
]
)
}
58 changes: 58 additions & 0 deletions src/_modules/data_factory_storage_account/pipeline_tables.tf
@@ -0,0 +1,58 @@
resource "azurerm_data_factory_pipeline" "pipeline_table" {
for_each = toset(local.tables)
name = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value}-table-${module.naming_convention.suffix}", "/[$-]/", "_")
data_factory_id = var.data_factory_id
folder = "${var.storage_accounts.source.name}/table"

activities_json = jsonencode(
[
{
name = "CopyActivity"
type = "Copy"
dependsOn = []
policy = {
timeout = "0.12:00:00"
retry = 0
retryIntervalInSeconds = 30
secureOutput = false
secureInput = false
}
userProperties = []
typeProperties = {
source = {
type = "AzureTableSource"
azureTableSourceIgnoreTableNotFound = false
}
sink = {
type = "AzureTableSink"
writeBatchSize = 10000
writeBatchTimeout = "00:02:00"
azureTableInsertType = "merge",
azureTablePartitionKeyName = {
value = "PartitionKey",
type = "Expression"
},
azureTableRowKeyName = {
value = "RowKey",
type = "Expression"
},
}
enableStaging = false
}
inputs = [
{
referenceName = azurerm_data_factory_custom_dataset.source_dataset_table[each.value].name
type = "DatasetReference"
}
]
outputs = [
{
referenceName = azurerm_data_factory_custom_dataset.target_dataset_table[each.value].name
type = "DatasetReference"
}
]
}
]
)

}