diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b861689..7f6085e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -55,7 +55,12 @@ jobs: provider: lxd - name: integration-k8s provider: microk8s - name: ${{ matrix.tox-environments.name }} + product: + - kafka + - mysql + - postgresql + - mongodb + name: ${{ matrix.tox-environments.name }}-${{matrix.product}} needs: - lint - unit-test @@ -91,6 +96,6 @@ jobs: echo "mark_expression=not unstable" >> $GITHUB_OUTPUT fi - name: Run integration tests - run: tox run -e ${{ matrix.tox-environments.name }} -- -m '${{ steps.select-tests.outputs.mark_expression }}' + run: tox run -e ${{ matrix.tox-environments.name }}-${{matrix.product}} -- -m '${{ steps.select-tests.outputs.mark_expression }}' env: CI_PACKED_CHARMS: ${{ needs.build.outputs.charms }} diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 6886558..6a665b4 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -28,6 +28,13 @@ async def build_charm(charm_path, bases_index: int = None) -> Path: return ops_test +@pytest.fixture(scope="module") +async def data_integrator_charm(ops_test: OpsTest) -> Path: + """Data integrator charm used for integration testing.""" + charm = await ops_test.build_charm(".") + return charm + + @pytest.fixture(scope="module") async def app_charm(ops_test: OpsTest): """Build the application charm.""" diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py deleted file mode 100644 index 0e65c04..0000000 --- a/tests/integration/test_charm.py +++ /dev/null @@ -1,408 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Canonical Ltd. -# See LICENSE file for licensing details. - -import asyncio -import json -import logging -import time -from pathlib import PosixPath - -import pytest -from pytest_operator.plugin import OpsTest - -from .constants import ( - APP, - DATA_INTEGRATOR, - DATABASE_NAME, - EXTRA_USER_ROLES, - KAFKA, - MONGODB, - MYSQL, - POSTGRESQL, - TOPIC_NAME, - ZOOKEEPER, -) -from .helpers import ( - check_logs, - fetch_action_database, - fetch_action_get_credentials, - fetch_action_kafka, -) - -logger = logging.getLogger(__name__) - - -@pytest.mark.abort_on_fail -async def test_build_and_deploy(ops_test: OpsTest, app_charm: PosixPath): - data_integrator_charm = await ops_test.build_charm(".") - await asyncio.gather( - ops_test.model.deploy( - data_integrator_charm, application_name="data-integrator", num_units=1, series="jammy" - ), - ops_test.model.deploy(app_charm, application_name=APP, num_units=1, series="jammy"), - ) - await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, APP]) - assert ops_test.model.applications[DATA_INTEGRATOR].status == "blocked" - - # config database name - - config = {"database-name": DATABASE_NAME} - await ops_test.model.applications[DATA_INTEGRATOR].set_config(config) - - # test the active/waiting status for relation - await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR]) - assert ops_test.model.applications[DATA_INTEGRATOR].status == "blocked" - - -async def test_deploy_and_relate_mysql(ops_test: OpsTest): - """Test the relation with MySQL and database accessibility.""" - await asyncio.gather( - ops_test.model.deploy( - MYSQL[ops_test.cloud_name], - channel="edge", - application_name=MYSQL[ops_test.cloud_name], - num_units=1, - series="jammy", - trust=True, - ) - ) - await ops_test.model.wait_for_idle(apps=[MYSQL[ops_test.cloud_name]], wait_for_active=True) - assert
ops_test.model.applications[MYSQL[ops_test.cloud_name]].status == "active" - await ops_test.model.add_relation(DATA_INTEGRATOR, MYSQL[ops_test.cloud_name]) - await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, MYSQL[ops_test.cloud_name]]) - assert ops_test.model.applications[DATA_INTEGRATOR].status == "active" - - # get credential for MYSQL - credentials = await fetch_action_get_credentials( - ops_test.model.applications[DATA_INTEGRATOR].units[0] - ) - - logger.info(f"Create table on {MYSQL[ops_test.cloud_name]}") - result = await fetch_action_database( - ops_test.model.applications[APP].units[0], - "create-table", - MYSQL[ops_test.cloud_name], - json.dumps(credentials), - DATABASE_NAME, - ) - assert result["ok"] - logger.info(f"Insert data in the table on {MYSQL[ops_test.cloud_name]}") - result = await fetch_action_database( - ops_test.model.applications[APP].units[0], - "insert-data", - MYSQL[ops_test.cloud_name], - json.dumps(credentials), - DATABASE_NAME, - ) - assert result["ok"] - logger.info(f"Check assessibility of inserted data on {MYSQL[ops_test.cloud_name]}") - result = await fetch_action_database( - ops_test.model.applications[APP].units[0], - "check-inserted-data", - MYSQL[ops_test.cloud_name], - json.dumps(credentials), - DATABASE_NAME, - ) - assert result["ok"] - # remove relation and test connection again - await ops_test.model.applications[DATA_INTEGRATOR].remove_relation( - f"{DATA_INTEGRATOR}:mysql", f"{MYSQL[ops_test.cloud_name]}:database" - ) - - await ops_test.model.wait_for_idle(apps=[MYSQL[ops_test.cloud_name], DATA_INTEGRATOR]) - await ops_test.model.add_relation(DATA_INTEGRATOR, MYSQL[ops_test.cloud_name]) - await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, MYSQL[ops_test.cloud_name]]) - - # join with another relation and check the accessibility of the previously created database - new_credentials = await fetch_action_get_credentials( - ops_test.model.applications[DATA_INTEGRATOR].units[0] - ) - - assert credentials != new_credentials - logger.info( - f"Check assessibility of inserted data on {MYSQL[ops_test.cloud_name]} with new credentials" - ) - result = await fetch_action_database( - ops_test.model.applications[APP].units[0], - "check-inserted-data", - MYSQL[ops_test.cloud_name], - json.dumps(new_credentials), - DATABASE_NAME, - ) - assert result["ok"] - - -async def test_deploy_and_relate_postgresql(ops_test: OpsTest): - """Test the relation with PostgreSQL and database accessibility.""" - await asyncio.gather( - ops_test.model.deploy( - POSTGRESQL[ops_test.cloud_name], - channel="edge", - application_name=POSTGRESQL[ops_test.cloud_name], - num_units=1, - series="jammy", - trust=True, - ) - ) - await ops_test.model.wait_for_idle( - apps=[POSTGRESQL[ops_test.cloud_name]], - wait_for_active=True, - ) - assert ops_test.model.applications[POSTGRESQL[ops_test.cloud_name]].status == "active" - await ops_test.model.add_relation(DATA_INTEGRATOR, POSTGRESQL[ops_test.cloud_name]) - await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, POSTGRESQL[ops_test.cloud_name]]) - assert ops_test.model.applications[DATA_INTEGRATOR].status == "active" - - # get credential for PostgreSQL - credentials = await fetch_action_get_credentials( - ops_test.model.applications[DATA_INTEGRATOR].units[0] - ) - logger.info(f"Create table on {POSTGRESQL[ops_test.cloud_name]}") - result = await fetch_action_database( - ops_test.model.applications[APP].units[0], - "create-table", - POSTGRESQL[ops_test.cloud_name], - json.dumps(credentials), - DATABASE_NAME, - ) - assert 
result["ok"] - logger.info(f"Insert data in the table on {POSTGRESQL[ops_test.cloud_name]}") - result = await fetch_action_database( - ops_test.model.applications[APP].units[0], - "insert-data", - POSTGRESQL[ops_test.cloud_name], - json.dumps(credentials), - DATABASE_NAME, - ) - assert result["ok"] - logger.info(f"Check assessibility of inserted data on {POSTGRESQL[ops_test.cloud_name]}") - result = await fetch_action_database( - ops_test.model.applications[APP].units[0], - "check-inserted-data", - POSTGRESQL[ops_test.cloud_name], - json.dumps(credentials), - DATABASE_NAME, - ) - assert result["ok"] - await ops_test.model.applications[DATA_INTEGRATOR].remove_relation( - f"{DATA_INTEGRATOR}:postgresql", f"{POSTGRESQL[ops_test.cloud_name]}:database" - ) - - await ops_test.model.wait_for_idle(apps=[POSTGRESQL[ops_test.cloud_name], DATA_INTEGRATOR]) - await ops_test.model.add_relation(DATA_INTEGRATOR, POSTGRESQL[ops_test.cloud_name]) - await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, POSTGRESQL[ops_test.cloud_name]]) - - new_credentials = await fetch_action_get_credentials( - ops_test.model.applications[DATA_INTEGRATOR].units[0] - ) - assert credentials != new_credentials - logger.info( - f"Check assessibility of inserted data on {POSTGRESQL[ops_test.cloud_name]} with new credentials" - ) - result = await fetch_action_database( - ops_test.model.applications[APP].units[0], - "check-inserted-data", - POSTGRESQL[ops_test.cloud_name], - json.dumps(new_credentials), - DATABASE_NAME, - ) - assert result["ok"] - - -@pytest.mark.skip -async def test_deploy_and_relate_mongodb(ops_test: OpsTest): - """Test the relation with MongoDB and database accessibility.""" - channel = "dpe/edge" if ops_test.cloud_name == "localhost" else "edge" - await asyncio.gather( - ops_test.model.deploy( - MONGODB[ops_test.cloud_name], - channel=channel, - application_name=MONGODB[ops_test.cloud_name], - num_units=1, - series="focal", - ) - ) - await ops_test.model.wait_for_idle(apps=[MONGODB[ops_test.cloud_name]], wait_for_active=True) - assert ops_test.model.applications[MONGODB[ops_test.cloud_name]].status == "active" - await ops_test.model.add_relation(DATA_INTEGRATOR, MONGODB[ops_test.cloud_name]) - await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, MONGODB[ops_test.cloud_name]]) - assert ops_test.model.applications[DATA_INTEGRATOR].status == "active" - - # get credential for MongoDB - credentials = await fetch_action_get_credentials( - ops_test.model.applications[DATA_INTEGRATOR].units[0] - ) - logger.info(f"Create table on {MONGODB[ops_test.cloud_name]}") - result = await fetch_action_database( - ops_test.model.applications[APP].units[0], - "create-table", - MONGODB[ops_test.cloud_name], - json.dumps(credentials), - DATABASE_NAME, - ) - assert result["ok"] - logger.info(f"Insert data in the table on {MONGODB[ops_test.cloud_name]}") - result = await fetch_action_database( - ops_test.model.applications[APP].units[0], - "insert-data", - MONGODB[ops_test.cloud_name], - json.dumps(credentials), - DATABASE_NAME, - ) - assert result["ok"] - logger.info(f"Check assessibility of inserted data on {MONGODB[ops_test.cloud_name]}") - result = await fetch_action_database( - ops_test.model.applications[APP].units[0], - "check-inserted-data", - MONGODB[ops_test.cloud_name], - json.dumps(credentials), - DATABASE_NAME, - ) - assert result["ok"] - - # drop relation and get new credential for the same collection - await ops_test.model.applications[DATA_INTEGRATOR].remove_relation( - f"{DATA_INTEGRATOR}:mongodb", 
f"{MONGODB[ops_test.cloud_name]}:database" - ) - - await ops_test.model.wait_for_idle(apps=[MONGODB[ops_test.cloud_name], DATA_INTEGRATOR]) - await ops_test.model.add_relation(DATA_INTEGRATOR, MONGODB[ops_test.cloud_name]) - await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, MONGODB[ops_test.cloud_name]]) - - new_credentials = await fetch_action_get_credentials( - ops_test.model.applications[DATA_INTEGRATOR].units[0] - ) - - # test that different credentials are provided - assert credentials != new_credentials - - logger.info( - f"Check assessibility of inserted data on {MONGODB[ops_test.cloud_name]} with new credentials" - ) - result = await fetch_action_database( - ops_test.model.applications[APP].units[0], - "check-inserted-data", - MONGODB[ops_test.cloud_name], - json.dumps(new_credentials), - DATABASE_NAME, - ) - assert result["ok"] - - await ops_test.model.applications[DATA_INTEGRATOR].remove_relation( - f"{DATA_INTEGRATOR}:mongodb", f"{MONGODB[ops_test.cloud_name]}:database" - ) - - await ops_test.model.wait_for_idle(apps=[MONGODB[ops_test.cloud_name], DATA_INTEGRATOR]) - - -@pytest.mark.abort_on_fail -async def test_deploy_and_relate_kafka(ops_test: OpsTest): - """Test the relation with Kafka and the correct production and consumption of messagges.""" - await asyncio.gather( - ops_test.model.deploy( - ZOOKEEPER[ops_test.cloud_name], - channel="edge", - application_name=ZOOKEEPER[ops_test.cloud_name], - num_units=1, - series="jammy", - ), - ops_test.model.deploy( - KAFKA[ops_test.cloud_name], - channel="edge", - application_name=KAFKA[ops_test.cloud_name], - num_units=1, - series="jammy", - ), - ) - - await ops_test.model.wait_for_idle(apps=[ZOOKEEPER[ops_test.cloud_name]], timeout=1000) - await ops_test.model.wait_for_idle( - apps=[KAFKA[ops_test.cloud_name]], timeout=1000, status="waiting" - ) - time.sleep(10) - assert ops_test.model.applications[KAFKA[ops_test.cloud_name]].status == "waiting" - assert ops_test.model.applications[ZOOKEEPER[ops_test.cloud_name]].status == "active" - - await ops_test.model.add_relation(KAFKA[ops_test.cloud_name], ZOOKEEPER[ops_test.cloud_name]) - await ops_test.model.wait_for_idle( - apps=[KAFKA[ops_test.cloud_name], ZOOKEEPER[ops_test.cloud_name]], status="active" - ) - assert ops_test.model.applications[KAFKA[ops_test.cloud_name]].status == "active" - assert ops_test.model.applications[ZOOKEEPER[ops_test.cloud_name]].status == "active" - - # configure topic and extra-user-roles - config = {"topic-name": TOPIC_NAME, "extra-user-roles": EXTRA_USER_ROLES} - await ops_test.model.applications[DATA_INTEGRATOR].set_config(config) - - # test the active/waiting status for relation - await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR]) - await ops_test.model.wait_for_idle(apps=[KAFKA[ops_test.cloud_name], DATA_INTEGRATOR]) - await ops_test.model.add_relation(KAFKA[ops_test.cloud_name], DATA_INTEGRATOR) - await ops_test.model.wait_for_idle( - apps=[KAFKA[ops_test.cloud_name], ZOOKEEPER[ops_test.cloud_name], DATA_INTEGRATOR] - ) - time.sleep(10) - assert ops_test.model.applications[KAFKA[ops_test.cloud_name]].status == "active" - assert ops_test.model.applications[DATA_INTEGRATOR].status == "active" - await ops_test.model.wait_for_idle( - apps=[KAFKA[ops_test.cloud_name], ZOOKEEPER[ops_test.cloud_name], DATA_INTEGRATOR] - ) - - # get credential for Kafka - credentials = await fetch_action_get_credentials( - ops_test.model.applications[DATA_INTEGRATOR].units[0] - ) - - logger.info("Create topic") - await fetch_action_kafka( - 
ops_test.model.applications[APP].units[0], - "create-topic", - KAFKA[ops_test.cloud_name], - json.dumps(credentials), - TOPIC_NAME, - ) - - logger.info("Produce messages") - await fetch_action_kafka( - ops_test.model.applications[APP].units[0], - "produce-messages", - KAFKA[ops_test.cloud_name], - json.dumps(credentials), - TOPIC_NAME, - ) - logger.info("Check messages in logs") - check_logs( - model_full_name=ops_test.model_full_name, - kafka_unit_name=f"{KAFKA[ops_test.cloud_name]}/0", - topic=TOPIC_NAME, - ) - - await ops_test.model.applications[DATA_INTEGRATOR].remove_relation( - f"{DATA_INTEGRATOR}:kafka", f"{KAFKA[ops_test.cloud_name]}:kafka-client" - ) - await ops_test.model.wait_for_idle(apps=[KAFKA[ops_test.cloud_name], DATA_INTEGRATOR]) - - await ops_test.model.add_relation(DATA_INTEGRATOR, KAFKA[ops_test.cloud_name]) - await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, KAFKA[ops_test.cloud_name]]) - - new_credentials = await fetch_action_get_credentials( - ops_test.model.applications[DATA_INTEGRATOR].units[0] - ) - - # test that different credentials are provided - assert credentials != new_credentials - logger.info("Produce messages") - await fetch_action_kafka( - ops_test.model.applications[APP].units[0], - "produce-messages", - KAFKA[ops_test.cloud_name], - json.dumps(new_credentials), - TOPIC_NAME, - ) - logger.info("Check messages in logs") - check_logs( - model_full_name=ops_test.model_full_name, - kafka_unit_name=f"{KAFKA[ops_test.cloud_name]}/0", - topic=TOPIC_NAME, - ) diff --git a/tests/integration/test_kafka.py b/tests/integration/test_kafka.py new file mode 100644 index 0000000..2fdaccf --- /dev/null +++ b/tests/integration/test_kafka.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python3 +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. 
+
+import asyncio
+import json
+import logging
+from pathlib import PosixPath
+
+import pytest
+from pytest_operator.plugin import OpsTest
+
+from .constants import (
+    APP,
+    DATA_INTEGRATOR,
+    EXTRA_USER_ROLES,
+    KAFKA,
+    TOPIC_NAME,
+    ZOOKEEPER,
+)
+from .helpers import check_logs, fetch_action_get_credentials, fetch_action_kafka
+
+logger = logging.getLogger(__name__)
+
+
+@pytest.mark.abort_on_fail
+async def test_deploy(ops_test: OpsTest, app_charm: PosixPath, data_integrator_charm: PosixPath):
+    await asyncio.gather(
+        ops_test.model.deploy(
+            data_integrator_charm, application_name="data-integrator", num_units=1, series="jammy"
+        ),
+        ops_test.model.deploy(app_charm, application_name=APP, num_units=1, series="jammy"),
+    )
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, APP])
+    assert ops_test.model.applications[DATA_INTEGRATOR].status == "blocked"
+
+    config = {"topic-name": TOPIC_NAME, "extra-user-roles": EXTRA_USER_ROLES}
+    await ops_test.model.applications[DATA_INTEGRATOR].set_config(config)
+
+    # test the active/waiting status for relation
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR])
+    assert ops_test.model.applications[DATA_INTEGRATOR].status == "blocked"
+
+
+@pytest.mark.abort_on_fail
+async def test_deploy_and_relate_kafka(ops_test: OpsTest):
+    """Test the relation with Kafka and the correct production and consumption of messages."""
+    await asyncio.gather(
+        ops_test.model.deploy(
+            ZOOKEEPER[ops_test.cloud_name],
+            channel="edge",
+            application_name=ZOOKEEPER[ops_test.cloud_name],
+            num_units=1,
+            series="jammy",
+        ),
+        ops_test.model.deploy(
+            KAFKA[ops_test.cloud_name],
+            channel="edge",
+            application_name=KAFKA[ops_test.cloud_name],
+            num_units=1,
+            series="jammy",
+        ),
+    )
+
+    status = "blocked" if ops_test.cloud_name == "localhost" else "waiting"
+
+    await ops_test.model.wait_for_idle(
+        apps=[ZOOKEEPER[ops_test.cloud_name]], timeout=1000, status="active"
+    )
+    await ops_test.model.wait_for_idle(
+        apps=[KAFKA[ops_test.cloud_name]], timeout=1000, status=status
+    )
+
+    await ops_test.model.add_relation(KAFKA[ops_test.cloud_name], ZOOKEEPER[ops_test.cloud_name])
+    async with ops_test.fast_forward():
+        await ops_test.model.wait_for_idle(
+            apps=[KAFKA[ops_test.cloud_name], ZOOKEEPER[ops_test.cloud_name]], status="active"
+        )
+
+    await ops_test.model.add_relation(KAFKA[ops_test.cloud_name], DATA_INTEGRATOR)
+    await ops_test.model.wait_for_idle(
+        apps=[KAFKA[ops_test.cloud_name], ZOOKEEPER[ops_test.cloud_name], DATA_INTEGRATOR],
+        timeout=2000,
+        idle_period=60,
+        status="active",
+    )
+
+    # get credential for Kafka
+    credentials = await fetch_action_get_credentials(
+        ops_test.model.applications[DATA_INTEGRATOR].units[0]
+    )
+
+    logger.info("Create topic")
+    await fetch_action_kafka(
+        ops_test.model.applications[APP].units[0],
+        "create-topic",
+        KAFKA[ops_test.cloud_name],
+        json.dumps(credentials),
+        TOPIC_NAME,
+    )
+
+    logger.info("Produce messages")
+    await fetch_action_kafka(
+        ops_test.model.applications[APP].units[0],
+        "produce-messages",
+        KAFKA[ops_test.cloud_name],
+        json.dumps(credentials),
+        TOPIC_NAME,
+    )
+    logger.info("Check messages in logs")
+    check_logs(
+        model_full_name=ops_test.model_full_name,
+        kafka_unit_name=f"{KAFKA[ops_test.cloud_name]}/0",
+        topic=TOPIC_NAME,
+    )
+
+    await ops_test.model.applications[DATA_INTEGRATOR].remove_relation(
+        f"{DATA_INTEGRATOR}:kafka", f"{KAFKA[ops_test.cloud_name]}:kafka-client"
+    )
+    await ops_test.model.wait_for_idle(apps=[KAFKA[ops_test.cloud_name], DATA_INTEGRATOR])
+
+    await ops_test.model.add_relation(DATA_INTEGRATOR, KAFKA[ops_test.cloud_name])
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, KAFKA[ops_test.cloud_name]])
+
+    new_credentials = await fetch_action_get_credentials(
+        ops_test.model.applications[DATA_INTEGRATOR].units[0]
+    )
+
+    # test that different credentials are provided
+    assert credentials != new_credentials
+    logger.info("Produce messages")
+    await fetch_action_kafka(
+        ops_test.model.applications[APP].units[0],
+        "produce-messages",
+        KAFKA[ops_test.cloud_name],
+        json.dumps(new_credentials),
+        TOPIC_NAME,
+    )
+    logger.info("Check messages in logs")
+    check_logs(
+        model_full_name=ops_test.model_full_name,
+        kafka_unit_name=f"{KAFKA[ops_test.cloud_name]}/0",
+        topic=TOPIC_NAME,
+    )
diff --git a/tests/integration/test_mongodb.py b/tests/integration/test_mongodb.py
new file mode 100644
index 0000000..e2acbac
--- /dev/null
+++ b/tests/integration/test_mongodb.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+# Copyright 2023 Canonical Ltd.
+# See LICENSE file for licensing details.
+
+import asyncio
+import json
+import logging
+from pathlib import PosixPath
+
+import pytest
+from pytest_operator.plugin import OpsTest
+
+from .constants import APP, DATA_INTEGRATOR, DATABASE_NAME, MONGODB
+from .helpers import fetch_action_database, fetch_action_get_credentials
+
+logger = logging.getLogger(__name__)
+
+
+@pytest.mark.abort_on_fail
+async def test_deploy(ops_test: OpsTest, app_charm: PosixPath, data_integrator_charm: PosixPath):
+    await asyncio.gather(
+        ops_test.model.deploy(
+            data_integrator_charm, application_name="data-integrator", num_units=1, series="jammy"
+        ),
+        ops_test.model.deploy(app_charm, application_name=APP, num_units=1, series="jammy"),
+    )
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, APP])
+    assert ops_test.model.applications[DATA_INTEGRATOR].status == "blocked"
+
+    # config database name
+
+    config = {"database-name": DATABASE_NAME}
+    await ops_test.model.applications[DATA_INTEGRATOR].set_config(config)
+
+    # test the active/waiting status for relation
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR])
+    assert ops_test.model.applications[DATA_INTEGRATOR].status == "blocked"
+
+
+@pytest.mark.skip
+async def test_deploy_and_relate_mongodb(ops_test: OpsTest):
+    """Test the relation with MongoDB and database accessibility."""
+    channel = "dpe/edge" if ops_test.cloud_name == "localhost" else "edge"
+    await asyncio.gather(
+        ops_test.model.deploy(
+            MONGODB[ops_test.cloud_name],
+            channel=channel,
+            application_name=MONGODB[ops_test.cloud_name],
+            num_units=1,
+            series="focal",
+        )
+    )
+    await ops_test.model.wait_for_idle(apps=[MONGODB[ops_test.cloud_name]], wait_for_active=True)
+    assert ops_test.model.applications[MONGODB[ops_test.cloud_name]].status == "active"
+    await ops_test.model.add_relation(DATA_INTEGRATOR, MONGODB[ops_test.cloud_name])
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, MONGODB[ops_test.cloud_name]])
+    assert ops_test.model.applications[DATA_INTEGRATOR].status == "active"
+
+    # get credential for MongoDB
+    credentials = await fetch_action_get_credentials(
+        ops_test.model.applications[DATA_INTEGRATOR].units[0]
+    )
+    logger.info(f"Create table on {MONGODB[ops_test.cloud_name]}")
+    result = await fetch_action_database(
+        ops_test.model.applications[APP].units[0],
+        "create-table",
+        MONGODB[ops_test.cloud_name],
+        json.dumps(credentials),
+        DATABASE_NAME,
+    )
+    assert result["ok"]
+    logger.info(f"Insert data in the table on {MONGODB[ops_test.cloud_name]}")
+    result = await fetch_action_database(
+        ops_test.model.applications[APP].units[0],
+        "insert-data",
+        MONGODB[ops_test.cloud_name],
+        json.dumps(credentials),
+        DATABASE_NAME,
+    )
+    assert result["ok"]
+    logger.info(f"Check accessibility of inserted data on {MONGODB[ops_test.cloud_name]}")
+    result = await fetch_action_database(
+        ops_test.model.applications[APP].units[0],
+        "check-inserted-data",
+        MONGODB[ops_test.cloud_name],
+        json.dumps(credentials),
+        DATABASE_NAME,
+    )
+    assert result["ok"]
+
+    # drop relation and get new credential for the same collection
+    await ops_test.model.applications[DATA_INTEGRATOR].remove_relation(
+        f"{DATA_INTEGRATOR}:mongodb", f"{MONGODB[ops_test.cloud_name]}:database"
+    )
+
+    await ops_test.model.wait_for_idle(apps=[MONGODB[ops_test.cloud_name], DATA_INTEGRATOR])
+    await ops_test.model.add_relation(DATA_INTEGRATOR, MONGODB[ops_test.cloud_name])
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, MONGODB[ops_test.cloud_name]])
+
+    new_credentials = await fetch_action_get_credentials(
+        ops_test.model.applications[DATA_INTEGRATOR].units[0]
+    )
+
+    # test that different credentials are provided
+    assert credentials != new_credentials
+
+    logger.info(
+        f"Check accessibility of inserted data on {MONGODB[ops_test.cloud_name]} with new credentials"
+    )
+    result = await fetch_action_database(
+        ops_test.model.applications[APP].units[0],
+        "check-inserted-data",
+        MONGODB[ops_test.cloud_name],
+        json.dumps(new_credentials),
+        DATABASE_NAME,
+    )
+    assert result["ok"]
+
+    await ops_test.model.applications[DATA_INTEGRATOR].remove_relation(
+        f"{DATA_INTEGRATOR}:mongodb", f"{MONGODB[ops_test.cloud_name]}:database"
+    )
+
+    await ops_test.model.wait_for_idle(apps=[MONGODB[ops_test.cloud_name], DATA_INTEGRATOR])
diff --git a/tests/integration/test_mysql.py b/tests/integration/test_mysql.py
new file mode 100644
index 0000000..bff838e
--- /dev/null
+++ b/tests/integration/test_mysql.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python3
+# Copyright 2023 Canonical Ltd.
+# See LICENSE file for licensing details.
+
+import asyncio
+import json
+import logging
+from pathlib import PosixPath
+
+import pytest
+from pytest_operator.plugin import OpsTest
+
+from .constants import APP, DATA_INTEGRATOR, DATABASE_NAME, MYSQL
+from .helpers import fetch_action_database, fetch_action_get_credentials
+
+logger = logging.getLogger(__name__)
+
+
+@pytest.mark.abort_on_fail
+async def test_deploy(ops_test: OpsTest, app_charm: PosixPath, data_integrator_charm: PosixPath):
+    await asyncio.gather(
+        ops_test.model.deploy(
+            data_integrator_charm, application_name="data-integrator", num_units=1, series="jammy"
+        ),
+        ops_test.model.deploy(app_charm, application_name=APP, num_units=1, series="jammy"),
+    )
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, APP])
+    assert ops_test.model.applications[DATA_INTEGRATOR].status == "blocked"
+
+    # config database name
+
+    config = {"database-name": DATABASE_NAME}
+    await ops_test.model.applications[DATA_INTEGRATOR].set_config(config)
+
+    # test the active/waiting status for relation
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR])
+    assert ops_test.model.applications[DATA_INTEGRATOR].status == "blocked"
+
+
+async def test_deploy_and_relate_mysql(ops_test: OpsTest):
+    """Test the relation with MySQL and database accessibility."""
+    await asyncio.gather(
+        ops_test.model.deploy(
+            MYSQL[ops_test.cloud_name],
+            channel="edge",
+            application_name=MYSQL[ops_test.cloud_name],
+            num_units=1,
+            series="jammy",
+            trust=True,
+        )
+    )
+    await ops_test.model.wait_for_idle(apps=[MYSQL[ops_test.cloud_name]], wait_for_active=True)
+    assert ops_test.model.applications[MYSQL[ops_test.cloud_name]].status == "active"
+    await ops_test.model.add_relation(DATA_INTEGRATOR, MYSQL[ops_test.cloud_name])
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, MYSQL[ops_test.cloud_name]])
+    assert ops_test.model.applications[DATA_INTEGRATOR].status == "active"
+
+    # get credential for MYSQL
+    credentials = await fetch_action_get_credentials(
+        ops_test.model.applications[DATA_INTEGRATOR].units[0]
+    )
+
+    logger.info(f"Create table on {MYSQL[ops_test.cloud_name]}")
+    result = await fetch_action_database(
+        ops_test.model.applications[APP].units[0],
+        "create-table",
+        MYSQL[ops_test.cloud_name],
+        json.dumps(credentials),
+        DATABASE_NAME,
+    )
+    assert result["ok"]
+    logger.info(f"Insert data in the table on {MYSQL[ops_test.cloud_name]}")
+    result = await fetch_action_database(
+        ops_test.model.applications[APP].units[0],
+        "insert-data",
+        MYSQL[ops_test.cloud_name],
+        json.dumps(credentials),
+        DATABASE_NAME,
+    )
+    assert result["ok"]
+    logger.info(f"Check accessibility of inserted data on {MYSQL[ops_test.cloud_name]}")
+    result = await fetch_action_database(
+        ops_test.model.applications[APP].units[0],
+        "check-inserted-data",
+        MYSQL[ops_test.cloud_name],
+        json.dumps(credentials),
+        DATABASE_NAME,
+    )
+    assert result["ok"]
+    # remove relation and test connection again
+    await ops_test.model.applications[DATA_INTEGRATOR].remove_relation(
+        f"{DATA_INTEGRATOR}:mysql", f"{MYSQL[ops_test.cloud_name]}:database"
+    )
+
+    await ops_test.model.wait_for_idle(apps=[MYSQL[ops_test.cloud_name], DATA_INTEGRATOR])
+    await ops_test.model.add_relation(DATA_INTEGRATOR, MYSQL[ops_test.cloud_name])
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, MYSQL[ops_test.cloud_name]])
+
+    # join with another relation and check the accessibility of the previously created database
+    new_credentials = await fetch_action_get_credentials(
+        ops_test.model.applications[DATA_INTEGRATOR].units[0]
+    )
+
+    assert credentials != new_credentials
+    logger.info(
+        f"Check accessibility of inserted data on {MYSQL[ops_test.cloud_name]} with new credentials"
+    )
+    result = await fetch_action_database(
+        ops_test.model.applications[APP].units[0],
+        "check-inserted-data",
+        MYSQL[ops_test.cloud_name],
+        json.dumps(new_credentials),
+        DATABASE_NAME,
+    )
+    assert result["ok"]
diff --git a/tests/integration/test_postgresql.py b/tests/integration/test_postgresql.py
new file mode 100644
index 0000000..76fca38
--- /dev/null
+++ b/tests/integration/test_postgresql.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python3
+# Copyright 2023 Canonical Ltd.
+# See LICENSE file for licensing details.
+
+import asyncio
+import json
+import logging
+from pathlib import PosixPath
+
+import pytest
+from pytest_operator.plugin import OpsTest
+
+from .constants import APP, DATA_INTEGRATOR, DATABASE_NAME, POSTGRESQL
+from .helpers import fetch_action_database, fetch_action_get_credentials
+
+logger = logging.getLogger(__name__)
+
+
+@pytest.mark.abort_on_fail
+async def test_deploy(ops_test: OpsTest, app_charm: PosixPath, data_integrator_charm: PosixPath):
+    await asyncio.gather(
+        ops_test.model.deploy(
+            data_integrator_charm, application_name="data-integrator", num_units=1, series="jammy"
+        ),
+        ops_test.model.deploy(app_charm, application_name=APP, num_units=1, series="jammy"),
+    )
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, APP])
+    assert ops_test.model.applications[DATA_INTEGRATOR].status == "blocked"
+
+    # config database name
+
+    config = {"database-name": DATABASE_NAME}
+    await ops_test.model.applications[DATA_INTEGRATOR].set_config(config)
+
+    # test the active/waiting status for relation
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR])
+    assert ops_test.model.applications[DATA_INTEGRATOR].status == "blocked"
+
+
+async def test_deploy_and_relate_postgresql(ops_test: OpsTest):
+    """Test the relation with PostgreSQL and database accessibility."""
+    await asyncio.gather(
+        ops_test.model.deploy(
+            POSTGRESQL[ops_test.cloud_name],
+            channel="edge",
+            application_name=POSTGRESQL[ops_test.cloud_name],
+            num_units=1,
+            series="jammy",
+            trust=True,
+        )
+    )
+    await ops_test.model.wait_for_idle(
+        apps=[POSTGRESQL[ops_test.cloud_name]],
+        wait_for_active=True,
+    )
+    assert ops_test.model.applications[POSTGRESQL[ops_test.cloud_name]].status == "active"
+    await ops_test.model.add_relation(DATA_INTEGRATOR, POSTGRESQL[ops_test.cloud_name])
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, POSTGRESQL[ops_test.cloud_name]])
+    assert ops_test.model.applications[DATA_INTEGRATOR].status == "active"
+
+    # get credential for PostgreSQL
+    credentials = await fetch_action_get_credentials(
+        ops_test.model.applications[DATA_INTEGRATOR].units[0]
+    )
+    logger.info(f"Create table on {POSTGRESQL[ops_test.cloud_name]}")
+    result = await fetch_action_database(
+        ops_test.model.applications[APP].units[0],
+        "create-table",
+        POSTGRESQL[ops_test.cloud_name],
+        json.dumps(credentials),
+        DATABASE_NAME,
+    )
+    assert result["ok"]
+    logger.info(f"Insert data in the table on {POSTGRESQL[ops_test.cloud_name]}")
+    result = await fetch_action_database(
+        ops_test.model.applications[APP].units[0],
+        "insert-data",
+        POSTGRESQL[ops_test.cloud_name],
+        json.dumps(credentials),
+        DATABASE_NAME,
+    )
+    assert result["ok"]
+    logger.info(f"Check accessibility of inserted data on {POSTGRESQL[ops_test.cloud_name]}")
+    result = await fetch_action_database(
+        ops_test.model.applications[APP].units[0],
+        "check-inserted-data",
+        POSTGRESQL[ops_test.cloud_name],
+        json.dumps(credentials),
+        DATABASE_NAME,
+    )
+    assert result["ok"]
+    await ops_test.model.applications[DATA_INTEGRATOR].remove_relation(
+        f"{DATA_INTEGRATOR}:postgresql", f"{POSTGRESQL[ops_test.cloud_name]}:database"
+    )
+
+    await ops_test.model.wait_for_idle(apps=[POSTGRESQL[ops_test.cloud_name], DATA_INTEGRATOR])
+    await ops_test.model.add_relation(DATA_INTEGRATOR, POSTGRESQL[ops_test.cloud_name])
+    await ops_test.model.wait_for_idle(apps=[DATA_INTEGRATOR, POSTGRESQL[ops_test.cloud_name]])
+
+    new_credentials = await fetch_action_get_credentials(
+        ops_test.model.applications[DATA_INTEGRATOR].units[0]
+    )
+    assert credentials != new_credentials
+    logger.info(
+        f"Check accessibility of inserted data on {POSTGRESQL[ops_test.cloud_name]} with new credentials"
+    )
+    result = await fetch_action_database(
+        ops_test.model.applications[APP].units[0],
+        "check-inserted-data",
+        POSTGRESQL[ops_test.cloud_name],
+        json.dumps(new_credentials),
+        DATABASE_NAME,
+    )
+    assert result["ok"]
diff --git a/tox.ini b/tox.ini
index 9e9f2a8..7cea7ae 100644
--- a/tox.ini
+++ b/tox.ini
@@ -13,17 +13,25 @@ lib_path = {tox_root}/lib
 all_path = {[vars]src_path} {[vars]tests_path} {[vars]lib_path}
 
 [testenv]
-set_env =
-    PYTHONPATH = {tox_root}/lib:{[vars]src_path}
+allowlist_externals = poetry
+setenv =
+    PYTHONPATH={tox_root}/lib:{[vars]src_path}
     PYTHONBREAKPOINT=ipdb.set_trace
     PY_COLORS=1
+    lxd: CLOUD=localhost
+    k8s: CLOUD=microk8s
+    kafka: TEST_FILE=test_kafka.py
+    mysql: TEST_FILE=test_mysql.py
+    mongodb: TEST_FILE=test_mongodb.py
+    postgresql: TEST_FILE=test_postgresql.py
 pass_env =
+    CI
+    CI_PACKED_CHARMS
     PYTHONPATH
    CHARM_BUILD_DIR
     MODEL_SETTINGS
 
 [testenv:format]
-allowlist_externals = poetry
 description = Apply coding style standards to code
 commands =
     poetry install
@@ -31,7 +39,6 @@ commands =
     poetry run black {[vars]all_path}
 
 [testenv:lint]
-allowlist_externals = poetry
 description = Check code against coding style standards
 commands =
     poetry install
@@ -45,7 +52,6 @@ commands =
     poetry run black --check --diff {[vars]all_path}
 
 [testenv:unit]
-allowlist_externals = poetry
 description = Run unit tests
 commands =
     poetry install
@@ -54,28 +60,11 @@ commands =
     -m pytest -v --tb native -s {posargs} {[vars]tests_path}/unit
     poetry run coverage report
 
-[testenv:integration-lxd]
-allowlist_externals = poetry
-description = Run integration tests
-pass_env =
-    {[testenv]pass_env}
-    CI
-    CI_PACKED_CHARMS
-commands =
-    poetry install --with dev
-    poetry export -f requirements.txt -o requirements.txt
-    poetry run pytest -v --tb native --log-cli-level=INFO -s --cloud=localhost {posargs} {[vars]tests_path}/integration/
-
-[testenv:integration-k8s]
-allowlist_externals = poetry
+[testenv:integration-{lxd,k8s}-{kafka,mysql,mongodb,postgresql}]
 description = Run integration tests
-pass_env =
-    {[testenv]pass_env}
-    CI
-    CI_PACKED_CHARMS
 commands =
-    poetry install --with dev
+    poetry install --with dev --no-cache
     poetry export -f requirements.txt -o requirements.txt
-    poetry run pytest -v --tb native --log-cli-level=INFO -s --cloud=microk8s {posargs} {[vars]tests_path}/integration/
+    poetry run pytest -v --tb native --log-cli-level=INFO -s --cloud={env:CLOUD} {posargs} {[vars]tests_path}/integration/{env:TEST_FILE}
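Note: the generative tox section [testenv:integration-{lxd,k8s}-{kafka,mysql,mongodb,postgresql}] expands to eight environments (two providers times four products); the factor-conditional setenv entries select the Juju cloud (CLOUD) and the test module (TEST_FILE) for each combination, and the CI matrix above iterates over the same names. Assuming a controller for the chosen provider is already bootstrapped, a single product suite can be run locally with the same invocation CI uses, for example:

    tox run -e integration-lxd-kafka -- -m 'not unstable'

which resolves to pytest running tests/integration/test_kafka.py with --cloud=localhost.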