Skip to content

Commit

Permalink
feat: added missing datasets and checks
Browse files Browse the repository at this point in the history
  • Loading branch information
christian-calabrese committed Oct 16, 2024
1 parent 7760c8f commit f9aa68d
Show file tree
Hide file tree
Showing 6 changed files with 49 additions and 16 deletions.
2 changes: 1 addition & 1 deletion src/_modules/data_factory_storage_account/data.tf
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,6 @@ data "azurerm_storage_account" "target" {
}

# Enumerates all containers in the source storage account, used as a fallback
# when blob migration is enabled but no explicit container list was supplied.
# NOTE: `for_each` only accepts a map or a set of strings — a list such as
# `[1]` is rejected at plan time. A conditional `count` expresses the same
# on/off intent, and matches how consumers index this data source (`this[0]`).
data "azurerm_storage_containers" "this" {
  count = var.what_to_migrate.blob.enabled && length(var.what_to_migrate.blob.containers) == 0 ? 1 : 0

  storage_account_id = data.azurerm_storage_account.source.id
}
24 changes: 22 additions & 2 deletions src/_modules/data_factory_storage_account/datasets_containers.tf
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
resource "azurerm_data_factory_custom_dataset" "dataset_container" {
resource "azurerm_data_factory_custom_dataset" "source_dataset_container" {
for_each = local.containers
name = "${module.naming_convention.prefix}-adf-${each.value.name}-blob-${module.naming_convention.suffix}"
name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value.name}-blob-${module.naming_convention.suffix}"
data_factory_id = var.data_factory.id
type = "AzureBlob"

Expand All @@ -16,4 +16,24 @@ resource "azurerm_data_factory_custom_dataset" "dataset_container" {
type = "AzureBlob"
folderPath = each.value.name
})
}

# Target-side blob dataset: one per migrated container, named after the
# target storage account and the container so it cannot collide with the
# matching source dataset. Bound to the target blob linked service both via
# the `linked_service` block and inside `type_properties_json` (the custom
# dataset resource requires the reference in both places).
resource "azurerm_data_factory_custom_dataset" "target_dataset_container" {
  for_each = local.containers
  name            = "${module.naming_convention.prefix}-adf-${var.storage_accounts.target.name}-${each.value.name}-blob-${module.naming_convention.suffix}"
  data_factory_id = var.data_factory.id
  type            = "AzureBlob"

  linked_service {
    name = azurerm_data_factory_linked_service_azure_blob_storage.target_linked_service_blob.name
  }

  type_properties_json = jsonencode({
    linkedServiceName = {
      referenceName = azurerm_data_factory_linked_service_azure_blob_storage.target_linked_service_blob.name
      type          = "LinkedServiceReference"
    }
    type       = "AzureBlob"
    # folderPath scopes the dataset to the single container being migrated.
    folderPath = each.value.name
  })
}
21 changes: 18 additions & 3 deletions src/_modules/data_factory_storage_account/datasets_tables.tf
Original file line number Diff line number Diff line change
@@ -1,11 +1,26 @@
# Source-side Azure Table dataset: one per table selected for migration,
# named after the source storage account and the table, and bound to the
# source table linked service.
# NOTE(review): the diff residue in this hunk duplicated the resource header,
# `name`, and `linked_service.name` lines (old + new versions side by side),
# which is invalid HCL; this is the reconstructed post-commit version.
resource "azurerm_data_factory_custom_dataset" "source_dataset_table" {
  for_each        = local.tables
  name            = "${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value.name}-table-${module.naming_convention.suffix}"
  data_factory_id = var.data_factory.id
  type            = "AzureTable"

  linked_service {
    name = azurerm_data_factory_linked_service_azure_blob_storage.source_linked_service_table.name
  }

  type_properties_json = jsonencode({
    tableName = each.value.name
  })
}

resource "azurerm_data_factory_custom_dataset" "target_dataset_table" {
for_each = local.tables
name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.target.name}-${each.value.name}-table-${module.naming_convention.suffix}"
data_factory_id = var.data_factory.id
type = "AzureTable"

linked_service {
name = azurerm_data_factory_linked_service_azure_blob_storage.target_linked_service_table.name
}

type_properties_json = jsonencode({
Expand Down
4 changes: 2 additions & 2 deletions src/_modules/data_factory_storage_account/locals.tf
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
locals {
  # Containers to migrate: nothing when blob migration is disabled; otherwise
  # an explicitly supplied list wins, with a fallback to enumerating every
  # container found on the source storage account.
  # NOTE(review): this hunk carried both the old and new attribute lines from
  # the diff (duplicate `containers`/`tables` keys are invalid HCL); only the
  # post-commit versions are kept. Parentheses added around the nested
  # conditional for readability — semantics unchanged.
  containers = var.what_to_migrate.blob.enabled ? (length(var.what_to_migrate.blob.containers) > 0 ? var.what_to_migrate.blob.containers : [for container in data.azurerm_storage_containers.this[0].containers : container.name]) : []

  # Tables to migrate: only when table migration is enabled.
  tables = var.what_to_migrate.table.enabled ? var.what_to_migrate.table.tables : []
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
resource "azurerm_data_factory_pipeline" "pipeline_container" {
for_each = local.containers
name = "${module.naming_convention.prefix}-adf-${each.value.name}-blob-${module.naming_convention.suffix}"
name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value.name}-blob-${module.naming_convention.suffix}"
data_factory_id = var.data_factory.id

depends_on = [
Expand Down Expand Up @@ -35,7 +35,7 @@ resource "azurerm_data_factory_pipeline" "pipeline_container" {
}
}
sink = {
type = "JsonSink" # Check for binary
type = "JsonSink"
storeSettings = {
type = "AzureBlobStorageWriteSettings"
}
Expand Down
10 changes: 4 additions & 6 deletions src/_modules/data_factory_storage_account/pipeline_tables.tf
Original file line number Diff line number Diff line change
@@ -1,10 +1,8 @@
resource "azurerm_data_factory_pipeline" "pipeline_table" {
for_each = local.tables
name = "${module.naming_convention.prefix}-adf-${each.value.name}-table-${module.naming_convention.suffix}"
name = "${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value.name}-table-${module.naming_convention.suffix}"
data_factory_id = var.data_factory.id

variables = each.value.variables

depends_on = [
azurerm_data_factory_custom_dataset.dataset_table
]
Expand All @@ -31,19 +29,19 @@ resource "azurerm_data_factory_pipeline" "pipeline_table" {
sink = {
type = "AzureTableSink"
writeBatchSize = 10000
writeBatchTimeout = "00:00:30"
writeBatchTimeout = "00:02:00"
}
enableStaging = false
}
inputs = [
{
referenceName = each.value.input_dataset
referenceName = azurerm_data_factory_custom_dataset.source_dataset_table
type = "DatasetReference"
}
]
outputs = [
{
referenceName = each.value.output_dataset
referenceName = azurerm_data_factory_custom_dataset.target_dataset_table
type = "DatasetReference"
}
]
Expand Down

0 comments on commit f9aa68d

Please sign in to comment.