Skip to content

Commit

Permalink
feat(LOPS-1069): add methods to update integration (#1609)
Browse files Browse the repository at this point in the history
  • Loading branch information
FannyGaudin authored Jan 2, 2024
1 parent 6f189e2 commit 5bc293c
Show file tree
Hide file tree
Showing 8 changed files with 253 additions and 3 deletions.
20 changes: 20 additions & 0 deletions src/kili/adapters/kili_api_gateway/cloud_storage/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from typing import Dict, Generator, Optional

from kili.adapters.kili_api_gateway.base import BaseOperationMixin
from kili.adapters.kili_api_gateway.cloud_storage.types import DataIntegrationData
from kili.adapters.kili_api_gateway.helpers.queries import (
PaginatedGraphQLQuery,
QueryOptions,
Expand All @@ -13,6 +14,7 @@
DataConnectionId,
DataDifferenceType,
DataIntegrationFilters,
DataIntegrationId,
)
from kili.domain.types import ListOrTuple

Expand All @@ -21,6 +23,7 @@
compute_data_connection_difference_data_mapper,
data_connection_where_mapper,
data_integration_where_mapper,
integration_data_mapper,
)
from .operations import (
GQL_COUNT_DATA_INTEGRATIONS,
Expand All @@ -29,6 +32,7 @@
get_data_connection_query,
get_list_data_connections_query,
get_list_data_integrations_query,
get_update_integration_mutation,
get_validate_data_connection_differences_mutation,
)
from .types import (
Expand Down Expand Up @@ -123,3 +127,19 @@ def validate_data_connection_differences(
variables = {"where": {"connectionId": data_connection_id, "type": data_difference_type}}
result = self.graphql_client.execute(query, variables)
return result["data"]

def update_data_integration(
    self,
    data_integration_id: DataIntegrationId,
    data_integration_data: DataIntegrationData,
    fields: ListOrTuple[str],
) -> Dict:
    """Update a data integration and return the requested fields of the result."""
    mutation = get_update_integration_mutation(fragment_builder(fields))
    payload = {
        "data": integration_data_mapper(data=data_integration_data),
        "where": {"id": data_integration_id},
    }
    response = self.graphql_client.execute(mutation, payload)
    return response["data"]
29 changes: 29 additions & 0 deletions src/kili/adapters/kili_api_gateway/cloud_storage/mappers.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

from typing import Dict

from kili.adapters.kili_api_gateway.cloud_storage.types import DataIntegrationData
from kili.domain.cloud_storage import DataConnectionFilters, DataIntegrationFilters

from .types import (
Expand Down Expand Up @@ -48,3 +49,31 @@ def compute_data_connection_difference_data_mapper(
"blobPaths": data.blob_paths,
"warnings": data.warnings,
}


def integration_data_mapper(data: "DataIntegrationData") -> Dict:
    """Map a ``DataIntegrationData`` object to the GraphQL input payload.

    Args:
        data: Data integration attributes to send to the API.

    Returns:
        A dict whose keys are the camelCase field names of the GraphQL
        ``DataIntegrationData`` input type. ``None`` values are kept as-is;
        callers that only want the set fields must filter them out.
    """
    return {
        "allowedPaths": data.allowed_paths,
        # NOTE(review): the attribute is named `allowed_project` (singular)
        # while the API field is plural — kept to match the dataclass.
        "allowedProjects": data.allowed_project,
        "awsAccessPointARN": data.aws_access_point_arn,
        "awsRoleARN": data.aws_role_arn,
        "awsRoleExternalId": data.aws_role_external_id,
        "azureConnectionURL": data.azure_connection_url,
        "azureIsUsingServiceCredentials": data.azure_is_using_service_credentials,
        "azureSASToken": data.azure_sas_token,
        "azureTenantId": data.azure_tenant_id,
        "gcpBucketName": data.gcp_bucket_name,
        "includeRootFiles": data.include_root_files,
        "internalProcessingAuthorized": data.internal_processing_authorized,
        "name": data.name,
        "organizationId": data.organization_id,
        "platform": data.platform,
        "status": data.status,
        "s3AccessKey": data.s3_access_key,
        "s3BucketName": data.s3_bucket_name,
        "s3Endpoint": data.s3_endpoint,
        "s3Region": data.s3_region,
        "s3SecretKey": data.s3_secret_key,
        "s3SessionToken": data.s3_session_token,
    }
11 changes: 11 additions & 0 deletions src/kili/adapters/kili_api_gateway/cloud_storage/operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,3 +80,14 @@ def get_validate_data_connection_differences_mutation(fragment: str) -> str:
data: countDataIntegrations(where: $where)
}
"""


def get_update_integration_mutation(fragment: str) -> str:
    """Return the GraphQL mutation to update a data integration.

    Args:
        fragment: GraphQL fragment listing the fields to return on the
            updated data integration.
    """
    return f"""
mutation UpdatePropertiesInDataIntegration($data: DataIntegrationData!, $where: DataIntegrationWhere!) {{
  data: updatePropertiesInDataIntegration(data: $data, where: $where) {{
    {fragment}
  }}
}}
"""
35 changes: 34 additions & 1 deletion src/kili/adapters/kili_api_gateway/cloud_storage/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,12 @@
from datetime import datetime
from typing import List, Optional

from kili.domain.cloud_storage import DataIntegrationId
from kili.domain.cloud_storage import (
DataIntegrationId,
DataIntegrationPlatform,
DataIntegrationStatus,
)
from kili.domain.organization import OrganizationId
from kili.domain.project import ProjectId


Expand All @@ -25,3 +30,31 @@ class DataConnectionComputeDifferencesKiliAPIGatewayInput:

blob_paths: List[str]
warnings: List[str]


@dataclass
class DataIntegrationData:
    """Data integration input data.

    Field names mirror, in snake_case, the camelCase keys of the GraphQL
    ``DataIntegrationData`` input built by ``integration_data_mapper``.
    """

    allowed_paths: Optional[List[str]]
    # NOTE(review): singular name, but mapped to the plural "allowedProjects"
    # API field by integration_data_mapper — confirm intended naming.
    allowed_project: Optional[List[str]]
    aws_access_point_arn: Optional[str]
    aws_role_arn: Optional[str]
    aws_role_external_id: Optional[str]
    azure_connection_url: Optional[str]
    azure_is_using_service_credentials: Optional[bool]
    azure_sas_token: Optional[str]
    azure_tenant_id: Optional[str]
    gcp_bucket_name: Optional[str]
    # NOTE(review): annotated Optional[str] although documented elsewhere as
    # "whether to include root files" — looks like a boolean flag; confirm.
    include_root_files: Optional[str]
    # NOTE(review): same remark — documented as a yes/no setting but typed str.
    internal_processing_authorized: Optional[str]
    # Required fields (no Optional): always sent to the API.
    name: str
    organization_id: OrganizationId
    platform: DataIntegrationPlatform  # "AWS" | "Azure" | "GCP" | "CustomS3"
    status: DataIntegrationStatus  # "CONNECTED" | "DISCONNECTED" | "CHECKING"
    s3_access_key: Optional[str]
    s3_bucket_name: Optional[str]
    s3_endpoint: Optional[str]
    s3_region: Optional[str]
    s3_secret_key: Optional[str]
    s3_session_token: Optional[str]
3 changes: 1 addition & 2 deletions src/kili/domain/cloud_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,7 @@
DataIntegrationId = NewType("DataIntegrationId", str)
DataConnectionId = NewType("DataConnectionId", str)

DataIntegrationPlatform = Literal["AWS", "Azure", "GCP"]

DataIntegrationPlatform = Literal["AWS", "Azure", "GCP", "CustomS3"]

DataIntegrationStatus = Literal["CONNECTED", "DISCONNECTED", "CHECKING"]

Expand Down
80 changes: 80 additions & 0 deletions src/kili/presentation/client/cloud_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -356,3 +356,83 @@ def synchronize_cloud_storage_connection(
return cloud_storage_use_cases.get_data_connection(
data_connection_id=data_connection_id, fields=("numberOfAssets", "projectId")
)

@typechecked
def update_data_integration(
    self,
    platform: DataIntegrationPlatform,
    name: str,
    status: DataIntegrationStatus,
    organization_id: OrganizationId,
    data_integration_id: str,
    allowed_paths: Optional[List[str]] = None,
    allowed_project: Optional[List[str]] = None,
    aws_access_point_arn: Optional[str] = None,
    aws_role_arn: Optional[str] = None,
    aws_role_external_id: Optional[str] = None,
    azure_connection_url: Optional[str] = None,
    azure_is_using_service_credentials: Optional[bool] = None,
    azure_sas_token: Optional[str] = None,
    azure_tenant_id: Optional[str] = None,
    gcp_bucket_name: Optional[str] = None,
    include_root_files: Optional[str] = None,
    internal_processing_authorized: Optional[str] = None,
    s3_access_key: Optional[str] = None,
    s3_bucket_name: Optional[str] = None,
    s3_endpoint: Optional[str] = None,
    s3_region: Optional[str] = None,
    s3_secret_key: Optional[str] = None,
    s3_session_token: Optional[str] = None,
) -> Dict:
    """Update a data integration.

    Args:
        platform: Platform of the cloud storage integration.
        name: Name of the cloud storage integration.
        status: Status of the cloud storage integration.
        organization_id: Organization ID.
        data_integration_id: Data integration ID.
        allowed_paths: List of allowed paths.
        allowed_project: List of allowed projects.
        aws_access_point_arn: AWS access point ARN.
        aws_role_arn: AWS role ARN.
        aws_role_external_id: AWS role external ID.
        azure_connection_url: Azure connection URL.
        azure_is_using_service_credentials: Whether Azure is using service credentials.
        azure_sas_token: Azure SAS token.
        azure_tenant_id: Azure tenant ID.
        gcp_bucket_name: GCP bucket name.
        include_root_files: Whether to include root files.
        internal_processing_authorized: Whether internal processing is authorized.
        s3_access_key: S3 access key.
        s3_bucket_name: S3 bucket name.
        s3_endpoint: S3 endpoint.
        s3_region: S3 region.
        s3_secret_key: S3 secret key.
        s3_session_token: S3 session token.

    Returns:
        A dict with the fields of the updated data integration returned by the API.
    """
    return CloudStorageUseCases(self.kili_api_gateway).update_data_integration(
        name=name,
        data_integration_id=DataIntegrationId(data_integration_id),
        platform=platform,
        allowed_paths=allowed_paths,
        allowed_project=allowed_project,
        aws_access_point_arn=aws_access_point_arn,
        aws_role_arn=aws_role_arn,
        aws_role_external_id=aws_role_external_id,
        azure_connection_url=azure_connection_url,
        azure_is_using_service_credentials=azure_is_using_service_credentials,
        azure_sas_token=azure_sas_token,
        azure_tenant_id=azure_tenant_id,
        gcp_bucket_name=gcp_bucket_name,
        include_root_files=include_root_files,
        internal_processing_authorized=internal_processing_authorized,
        organization_id=organization_id,
        s3_access_key=s3_access_key,
        s3_bucket_name=s3_bucket_name,
        s3_endpoint=s3_endpoint,
        s3_region=s3_region,
        s3_secret_key=s3_secret_key,
        s3_session_token=s3_session_token,
        status=status,
    )
66 changes: 66 additions & 0 deletions src/kili/use_cases/cloud_storage/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,11 @@
from tenacity.stop import stop_after_delay
from tenacity.wait import wait_exponential

from kili.adapters.kili_api_gateway.cloud_storage.mappers import integration_data_mapper
from kili.adapters.kili_api_gateway.cloud_storage.types import (
AddDataConnectionKiliAPIGatewayInput,
DataConnectionComputeDifferencesKiliAPIGatewayInput,
DataIntegrationData,
)
from kili.adapters.kili_api_gateway.helpers.queries import QueryOptions
from kili.adapters.kili_api_gateway.kili_api_gateway import KiliAPIGateway
Expand All @@ -24,8 +26,11 @@
DataDifferenceType,
DataIntegrationFilters,
DataIntegrationId,
DataIntegrationPlatform,
DataIntegrationStatus,
ProjectId,
)
from kili.domain.organization import OrganizationId
from kili.domain.types import ListOrTuple
from kili.use_cases.base import BaseUseCases

Expand Down Expand Up @@ -171,6 +176,67 @@ def synchronize_data_connection(
removed,
)

def update_data_integration(
    self,
    allowed_paths: Optional[List[str]],
    allowed_project: Optional[List[str]],
    aws_access_point_arn: Optional[str],
    aws_role_arn: Optional[str],
    aws_role_external_id: Optional[str],
    azure_connection_url: Optional[str],
    azure_is_using_service_credentials: Optional[bool],
    azure_sas_token: Optional[str],
    azure_tenant_id: Optional[str],
    data_integration_id: DataIntegrationId,
    gcp_bucket_name: Optional[str],
    include_root_files: Optional[str],
    internal_processing_authorized: Optional[str],
    name: str,
    platform: DataIntegrationPlatform,
    organization_id: OrganizationId,
    s3_access_key: Optional[str],
    s3_bucket_name: Optional[str],
    s3_endpoint: Optional[str],
    s3_region: Optional[str],
    s3_secret_key: Optional[str],
    s3_session_token: Optional[str],
    status: DataIntegrationStatus,
) -> Dict:
    """Update a data integration with the given attributes.

    The fields requested back from the API are exactly those whose value is
    not ``None``, plus ``id``.
    """
    integration_data = DataIntegrationData(
        allowed_paths=allowed_paths,
        allowed_project=allowed_project,
        aws_access_point_arn=aws_access_point_arn,
        aws_role_arn=aws_role_arn,
        aws_role_external_id=aws_role_external_id,
        azure_connection_url=azure_connection_url,
        azure_is_using_service_credentials=azure_is_using_service_credentials,
        azure_sas_token=azure_sas_token,
        azure_tenant_id=azure_tenant_id,
        gcp_bucket_name=gcp_bucket_name,
        include_root_files=include_root_files,
        internal_processing_authorized=internal_processing_authorized,
        name=name,
        organization_id=organization_id,
        platform=platform,
        status=status,
        s3_access_key=s3_access_key,
        s3_bucket_name=s3_bucket_name,
        s3_endpoint=s3_endpoint,
        s3_region=s3_region,
        s3_secret_key=s3_secret_key,
        s3_session_token=s3_session_token,
    )
    # Only request the fields we are actually setting, and always include "id".
    requested_fields = [
        field
        for field, value in integration_data_mapper(integration_data).items()
        if value is not None
    ]
    if "id" not in requested_fields:
        requested_fields.append("id")

    return self._kili_api_gateway.update_data_integration(
        data_integration_id,
        data_integration_data=integration_data,
        fields=tuple(requested_fields),
    )


def _compute_differences(
data_connection_id: DataConnectionId, kili_api_gateway: KiliAPIGateway
Expand Down
12 changes: 12 additions & 0 deletions tests/e2e/test_cloud_storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import pytest

from kili.client import Kili
from kili.domain.organization import OrganizationId


@pytest.fixture()
Expand Down Expand Up @@ -122,6 +123,17 @@ def test_e2e_synchronize_cloud_storage_connection(
if not len(data_integrations) == count_data_integrations == 1:
raise ValueError(f"Data integration {data_integration_id} not found. Cannot run test.")

# Update data integration
data_integration_updated = kili.update_data_integration(
data_integration_id=data_integration_id,
name="updated_name",
platform="AWS",
status="CONNECTED",
organization_id=OrganizationId("feat1-organization"),
)

assert data_integration_updated["name"] == "updated_name"

# Create a data connection
data_connection_id = kili.add_cloud_storage_connection(
project_id=project_id,
Expand Down

0 comments on commit 5bc293c

Please sign in to comment.