From 4285491394fca2fa80f3f24bf47d5e4d6ccb330a Mon Sep 17 00:00:00 2001
From: ckunki
Date: Tue, 24 Sep 2024 17:03:58 +0200
Subject: [PATCH] Replaced tests/utils/parameters.py with the
 backend_aware_database_params fixture

---
 tests/deployment/test_scripts_deployer.py     |  4 --
 tests/deployment/test_scripts_deployer_cli.py | 12 +++--
 .../with_db/test_query_loop_integration.py    |  1 -
 tests/utils/parameters.py                     | 46 -------
 4 files changed, 7 insertions(+), 56 deletions(-)
 delete mode 100644 tests/utils/parameters.py

diff --git a/tests/deployment/test_scripts_deployer.py b/tests/deployment/test_scripts_deployer.py
index f1785211..b7039e17 100644
--- a/tests/deployment/test_scripts_deployer.py
+++ b/tests/deployment/test_scripts_deployer.py
@@ -1,7 +1,6 @@
 from exasol_advanced_analytics_framework.deployment.scripts_deployer import \
     ScriptsDeployer
 from tests.utils.db_queries import DBQueries
-from tests.utils.parameters import db_params
 
 
 def test_scripts_deployer(deployed_slc, language_alias, pyexasol_connection, request):
@@ -9,9 +8,6 @@ def test_scripts_deployer(deployed_slc, language_alias, pyexasol_connection, req
     pyexasol_connection.execute(f"DROP SCHEMA IF EXISTS {schema_name} CASCADE;")
     ScriptsDeployer.run2(
         pyexasol_connection,
-        # dsn=db_params.address(),
-        # user=db_params.user,
-        # password=db_params.password,
         schema=schema_name,
         language_alias=language_alias,
         develop=True)
diff --git a/tests/deployment/test_scripts_deployer_cli.py b/tests/deployment/test_scripts_deployer_cli.py
index 44f3d0cc..fdfd0160 100644
--- a/tests/deployment/test_scripts_deployer_cli.py
+++ b/tests/deployment/test_scripts_deployer_cli.py
@@ -1,20 +1,22 @@
 from click.testing import CliRunner
 from exasol_advanced_analytics_framework import deploy
 from tests.utils.db_queries import DBQueries
-from tests.utils.parameters import db_params
 from exasol_advanced_analytics_framework.slc import LANGUAGE_ALIAS
 
-@pytest.mark.skip(reason="No need to test deployer provided by PEC.")
 def test_scripts_deployer_cli(upload_language_container,
+                              backend_aware_database_params,
                               pyexasol_connection,
                               request):
     schema_name = request.node.name
     pyexasol_connection.execute(f"DROP SCHEMA IF EXISTS {schema_name} CASCADE;")
+    dsn = backend_aware_database_params["dsn"]
+    user = backend_aware_database_params["user"]
+    password = backend_aware_database_params["password"]
     args_list = [
         "scripts",
-        "--dsn", db_params.address(),
-        "--user", db_params.user,
-        "--pass", db_params.password,
+        "--dsn", dsn,
+        "--user", user,
+        "--pass", password,
         "--schema", schema_name,
         "--language-alias", LANGUAGE_ALIAS
     ]
diff --git a/tests/integration_tests/with_db/test_query_loop_integration.py b/tests/integration_tests/with_db/test_query_loop_integration.py
index 935d6d25..3a41e300 100644
--- a/tests/integration_tests/with_db/test_query_loop_integration.py
+++ b/tests/integration_tests/with_db/test_query_loop_integration.py
@@ -7,7 +7,6 @@
 
 from tests.test_package.test_query_handlers.query_handler_test import \
     FINAL_RESULT, QUERY_LIST, TEST_INPUT
-from tests.utils.parameters import db_params
 
 QUERY_FLUSH_STATS = """FLUSH STATISTICS"""
 QUERY_AUDIT_LOGS = """
diff --git a/tests/utils/parameters.py b/tests/utils/parameters.py
deleted file mode 100644
index 6051c59c..00000000
--- a/tests/utils/parameters.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from dataclasses import dataclass
-
-
-@dataclass(frozen=True)
-class DBParams:
-    host: str
-    port: str
-    user: str
-    password: str
-
-    def address(self) -> str:
-        return f"{self.host}:{self.port}"
-
-
-@dataclass(frozen=True)
-class BucketFSParams:
-    host: str
-    port: str
-    real_port: str
-    user: str
-    password: str
-    name: str
-    bucket: str
-    path_in_bucket: str
-
-    def address(self, port=None) -> str:
-        port = self.port if not port else port
-        return f"http://{self.host}:{port}/{self.bucket}/" \
-               f"{self.path_in_bucket};{self.name}"
-
-
-db_params = DBParams(
-    host="127.0.0.1",
-    port="8888",
-    user="sys",
-    password="exasol")
-
-bucketfs_params = BucketFSParams(
-    host="127.0.0.1",
-    port="6666",
-    real_port="2580",
-    user="w",
-    password="write",
-    name="bfsdefault",
-    bucket="default",
-    path_in_bucket="container")
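
Note (not part of the patch): a minimal sketch of how a test could consume the fixture that replaces the deleted db_params, assuming backend_aware_database_params is the pytest fixture provided by the pytest-exasol-backend plugin and yields a dict of pyexasol.connect() keyword arguments, as the hunks above suggest:

    # Sketch only: assumes the backend_aware_database_params fixture yields a
    # dict containing at least "dsn", "user" and "password", matching the
    # keys used in the patched test above.
    import pyexasol


    def test_connect_via_fixture(backend_aware_database_params):
        # Pass the fixture dict straight to pyexasol instead of the old
        # hard-coded DBParams values from tests/utils/parameters.py.
        connection = pyexasol.connect(**backend_aware_database_params)
        assert connection.execute("SELECT 1").fetchval() == 1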