diff --git a/.github/workflows/tests-integration-db.yml b/.github/workflows/tests-integration-db.yml index 75bd4569..b5bec3f5 100644 --- a/.github/workflows/tests-integration-db.yml +++ b/.github/workflows/tests-integration-db.yml @@ -44,13 +44,7 @@ jobs: - name: Poetry install run: poetry run -- nox -s run_in_dev_env -- poetry install - - name: Build language container - run: poetry run -- nox -s build_language_container - - - name: Start test environment - run: poetry run -- nox -s start_integration_test_environment - - name: Run Python integration tests with db - run: poetry run -- nox -s run_python_integration_tests_with_db + run: poetry run -- nox -s run_python_integration_tests_with_db -- -- --setup-show --backend=onprem env: PYTEST_ADDOPTS: '-o log_cli=true -o log_cli_level=INFO ${{ steps.pytest-markers.outputs.slow-tests }}' diff --git a/.gitignore b/.gitignore index 205ddfb9..7736b058 100644 --- a/.gitignore +++ b/.gitignore @@ -161,3 +161,6 @@ doc/_build # emacs TAGS + +# locally built script language container files +.slc \ No newline at end of file diff --git a/doc/changes/changes_0.1.0.md b/doc/changes/changes_0.1.0.md index a47608d1..9454c4e6 100644 --- a/doc/changes/changes_0.1.0.md +++ b/doc/changes/changes_0.1.0.md @@ -44,7 +44,27 @@ Code name: * #95: Remove setup.py * #114: Refactored BackgroundPeerState and introduced parameter objects * #173: Introduced Python Toolbox +* #174: Replaced language container handling with PEC and SLC plugin +* #183: Fixed warning on tests with `__init__` constructor +* #184: Updated micromamba to version 2.0.0 ### Documentation * #9: Added README file + +## Dependency Updates + +Compared to `main` branch this release updates the following dependencies: + +### File `pyproject.toml` + +* Updated dependency `exasol-bucketfs:0.8.0` to `0.13.0` +* Updated dependency `pyexasol:0.25.2` to `0.27.0` +* Updated dependency `typeguard:2.13.3` to `4.3.0` +* Updated dependency `exasol-integration-test-docker-environment:3.1.0` to `3.2.0` +* Updated dependency `polyfactory:2.16.2` to `2.17.0` +* Added dependency `exasol-python-extension-common:0.6.0` +* Added dependency `exasol-script-languages-container-tool:1.0.0` +* Added dependency `pytest-exasol-slc:0.3.0` +* Added dependency `pytest-exasol-backend:0.3.0` +* Added dependency `pytest-exasol-extension:0.1.0` diff --git a/exasol_advanced_analytics_framework/deploy.py b/exasol_advanced_analytics_framework/deploy.py index a44ac9da..2e723d12 100644 --- a/exasol_advanced_analytics_framework/deploy.py +++ b/exasol_advanced_analytics_framework/deploy.py @@ -1,9 +1,18 @@ import logging import click -from exasol_advanced_analytics_framework.deployment. \ - language_container_deployer_cli import language_container_deployer_main -from exasol_advanced_analytics_framework.deployment.
\ - scripts_deployer_cli import scripts_deployer_main +from exasol_advanced_analytics_framework.slc import ( + SLC_FILE_NAME, + SLC_URL_FORMATTER, +) +from exasol_advanced_analytics_framework.deployment import ( + scripts_deployer_cli, + language_container_deployer_cli, +) +from exasol.python_extension_common.deployment.language_container_deployer_cli import ( + language_container_deployer_main, + slc_parameter_formatters, + CustomizableParameters, +) @click.group() @@ -11,8 +20,12 @@ def main(): pass -main.add_command(scripts_deployer_main) -main.add_command(language_container_deployer_main) +slc_parameter_formatters.set_formatter(CustomizableParameters.container_url, SLC_URL_FORMATTER) +slc_parameter_formatters.set_formatter(CustomizableParameters.container_name, SLC_FILE_NAME) + +main.add_command(scripts_deployer_cli.scripts_deployer_main) +main.add_command(language_container_deployer_cli.language_container_deployer_main) + if __name__ == '__main__': logging.basicConfig( diff --git a/exasol_advanced_analytics_framework/deployment/language_container_deployer.py b/exasol_advanced_analytics_framework/deployment/language_container_deployer.py deleted file mode 100644 index cf93fa2f..00000000 --- a/exasol_advanced_analytics_framework/deployment/language_container_deployer.py +++ /dev/null @@ -1,111 +0,0 @@ -import pyexasol -from typing import List -from pathlib import Path, PurePosixPath -from exasol_bucketfs_utils_python.bucket_config import BucketConfig -from exasol_bucketfs_utils_python.bucketfs_config import BucketFSConfig -from exasol_bucketfs_utils_python.bucketfs_location import BucketFSLocation -from exasol_bucketfs_utils_python.bucketfs_connection_config import \ - BucketFSConnectionConfig -import logging -logger = logging.getLogger(__name__) - - -class LanguageContainerDeployer: - def __init__(self, - pyexasol_connection: pyexasol.ExaConnection, - language_alias: str, - bucketfs_location: BucketFSLocation, - container_file: Path): - self._container_file = container_file - self._bucketfs_location = bucketfs_location - self._language_alias = language_alias - self._pyexasol_conn = pyexasol_connection - logger.debug(f"Init {LanguageContainerDeployer.__name__}") - - def deploy_container(self): - path_in_udf = self._upload_container() - for alter in ["SESSION", "SYSTEM"]: - alter_command = self._generate_alter_command(alter, path_in_udf) - self._pyexasol_conn.execute(alter_command) - logging.debug(alter_command) - - def _upload_container(self) -> PurePosixPath: - if not self._container_file.is_file(): - raise RuntimeError(f"Container file {self._container_file} " - f"is not a file.") - with open(self._container_file, "br") as f: - upload_uri, path_in_udf = \ - self._bucketfs_location.upload_fileobj_to_bucketfs( - fileobj=f, bucket_file_path=self._container_file.name) - logging.debug("Container is uploaded to bucketfs") - return PurePosixPath(path_in_udf) - - def _generate_alter_command(self, alter_type: str, - path_in_udf: PurePosixPath) -> str: - new_settings = \ - self._update_previous_language_settings(alter_type, path_in_udf) - alter_command = \ - f"ALTER {alter_type} SET SCRIPT_LANGUAGES='{new_settings}';" - return alter_command - - def _update_previous_language_settings( - self, alter_type: str, path_in_udf: PurePosixPath) -> str: - prev_lang_settings = self._get_previous_language_settings(alter_type) - prev_lang_aliases = prev_lang_settings.split(" ") - self.check_if_requested_language_alias_already_exists(prev_lang_aliases) - new_definitions_str = 
self._generate_new_language_settings( - path_in_udf, prev_lang_aliases) - return new_definitions_str - - def _generate_new_language_settings(self, path_in_udf: PurePosixPath, - prev_lang_aliases: List[str]) -> str: - other_definitions = [ - alias_definition for alias_definition in prev_lang_aliases - if not alias_definition.startswith(self._language_alias + "=")] - path_in_udf_without_bucksts = Path(*path_in_udf.parts[2:]) - new_language_alias_definition = \ - f"{self._language_alias}=localzmq+protobuf:///" \ - f"{path_in_udf_without_bucksts}?lang=python#" \ - f"{path_in_udf}/exaudf/exaudfclient_py3" - new_definitions = other_definitions + [new_language_alias_definition] - new_definitions_str = " ".join(new_definitions) - return new_definitions_str - - def check_if_requested_language_alias_already_exists( - self, prev_lang_aliases: List[str]) -> None: - definition_for_requested_alias = [ - alias_definition for alias_definition in prev_lang_aliases - if alias_definition.startswith(self._language_alias + "=")] - if not len(definition_for_requested_alias) == 0: - logging.warning(f"The requested language alias " - f"{self._language_alias} is already in use.") - - def _get_previous_language_settings(self, alter_type: str) -> str: - result = self._pyexasol_conn.execute( - f"""SELECT "{alter_type}_VALUE" FROM SYS.EXA_PARAMETERS WHERE - PARAMETER_NAME='SCRIPT_LANGUAGES'""").fetchall() - return result[0][0] - - @classmethod - def run(cls, bucketfs_name: str, bucketfs_host: str, bucketfs_port: int, - bucketfs_use_https: bool, bucketfs_user: str, container_file: Path, - bucketfs_password: str, bucket: str, path_in_bucket: str, - dsn: str, db_user: str, db_password: str, language_alias: str): - - pyexasol_conn = pyexasol.connect( - dsn=dsn, user=db_user, password=db_password) - - _bucketfs_connection = BucketFSConnectionConfig( - host=bucketfs_host, port=bucketfs_port, user=bucketfs_user, - pwd=bucketfs_password, is_https=bucketfs_use_https) - _bucketfs_config = BucketFSConfig( - bucketfs_name=bucketfs_name, connection_config=_bucketfs_connection) - _bucket_config = BucketConfig( - bucket_name=bucket, bucketfs_config=_bucketfs_config) - bucketfs_location = BucketFSLocation( - bucket_config=_bucket_config, - base_path=PurePosixPath(path_in_bucket)) - - language_container_deployer = cls( - pyexasol_conn, language_alias, bucketfs_location, container_file) - language_container_deployer.deploy_container() diff --git a/exasol_advanced_analytics_framework/deployment/language_container_deployer_cli.py b/exasol_advanced_analytics_framework/deployment/language_container_deployer_cli.py deleted file mode 100644 index 8efd9dec..00000000 --- a/exasol_advanced_analytics_framework/deployment/language_container_deployer_cli.py +++ /dev/null @@ -1,66 +0,0 @@ -import click -from pathlib import Path -from exasol_advanced_analytics_framework.deployment import utils -from exasol_advanced_analytics_framework.deployment.language_container_deployer import \ - LanguageContainerDeployer - - -@click.command(name="language-container") -@click.option('--bucketfs-name', type=str, required=True) -@click.option('--bucketfs-host', type=str, required=True) -@click.option('--bucketfs-port', type=int, required=True) -@click.option('--bucketfs_use-https', type=bool, default=False) -@click.option('--bucketfs-user', type=str, required=True, default="w") -@click.option('--bucketfs-password', type=str) -@click.option('--bucket', type=str, required=True) -@click.option('--path-in-bucket', type=str, required=True, default=None) 
-@click.option('--container-file', type=click.Path(exists=True, file_okay=True), required=True) -@click.option('--dsn', type=str, required=True) -@click.option('--db-user', type=str, required=True) -@click.option('--db-pass', type=str) -@click.option('--language-alias', type=str, default="PYTHON3_AAF") -def language_container_deployer_main( - bucketfs_name: str, - bucketfs_host: str, - bucketfs_port: int, - bucketfs_use_https: bool, - bucketfs_user: str, - bucketfs_password: str, - bucket: str, - path_in_bucket: str, - container_file: str, - dsn: str, - db_user: str, - db_pass: str, - language_alias: str): - bucketfs_password = utils.get_password( - bucketfs_password, bucketfs_user, - utils.BUCKETFS_PASSWORD_ENVIRONMENT_VARIABLE, "BucketFS Password") - db_password = utils.get_password( - db_pass, db_user, - utils.DB_PASSWORD_ENVIRONMENT_VARIABLE, "DB Password") - - LanguageContainerDeployer.run( - bucketfs_name=bucketfs_name, - bucketfs_host=bucketfs_host, - bucketfs_port=bucketfs_port, - bucketfs_use_https=bucketfs_use_https, - bucketfs_user=bucketfs_user, - bucketfs_password=bucketfs_password, - bucket=bucket, - path_in_bucket=path_in_bucket, - container_file=Path(container_file), - dsn=dsn, - db_user=db_user, - db_password=db_password, - language_alias=language_alias - ) - - -if __name__ == '__main__': - import logging - logging.basicConfig( - format='%(asctime)s - %(module)s - %(message)s', - level=logging.DEBUG) - - language_container_deployer_main() diff --git a/exasol_advanced_analytics_framework/deployment/scripts_deployer.py b/exasol_advanced_analytics_framework/deployment/scripts_deployer.py index 3755894a..8ffbba2f 100644 --- a/exasol_advanced_analytics_framework/deployment/scripts_deployer.py +++ b/exasol_advanced_analytics_framework/deployment/scripts_deployer.py @@ -52,10 +52,8 @@ def deploy_scripts(self) -> None: @classmethod def run(cls, dsn: str, user: str, password: str, schema: str, language_alias: str, develop: bool): - if develop: save_aaf_query_loop_lua_script() - pyexasol_conn = pyexasol.connect(dsn=dsn, user=user, password=password) scripts_deployer = cls(language_alias, schema, pyexasol_conn) scripts_deployer.deploy_scripts() diff --git a/exasol_advanced_analytics_framework/deployment/scripts_deployer_cli.py b/exasol_advanced_analytics_framework/deployment/scripts_deployer_cli.py index ce261b9c..bf66113d 100644 --- a/exasol_advanced_analytics_framework/deployment/scripts_deployer_cli.py +++ b/exasol_advanced_analytics_framework/deployment/scripts_deployer_cli.py @@ -2,28 +2,27 @@ from exasol_advanced_analytics_framework.deployment import utils from exasol_advanced_analytics_framework.deployment.scripts_deployer import \ ScriptsDeployer - +from exasol_advanced_analytics_framework.slc import LANGUAGE_ALIAS @click.command(name="scripts") @click.option('--dsn', type=str, required=True) @click.option('--user', type=str, required=True) @click.option('--pass', 'pwd', type=str) @click.option('--schema', type=str, required=True) -@click.option('--language-alias', type=str, default="PYTHON3_AAF") +@click.option('--language-alias', type=str, default=LANGUAGE_ALIAS) @click.option('--develop', type=bool, is_flag=True) def scripts_deployer_main( dsn: str, user: str, pwd: str, schema: str, language_alias: str, develop: bool): password = utils.get_password( pwd, user, utils.DB_PASSWORD_ENVIRONMENT_VARIABLE, "DB Password") - ScriptsDeployer.run( dsn=dsn, user=user, password=password, schema=schema, language_alias=language_alias, - develop=develop + develop=develop, ) diff --git 
a/exasol_advanced_analytics_framework/slc.py b/exasol_advanced_analytics_framework/slc.py new file mode 100644 index 00000000..594b4943 --- /dev/null +++ b/exasol_advanced_analytics_framework/slc.py @@ -0,0 +1,18 @@ +from contextlib import contextmanager +from exasol.python_extension_common.deployment.language_container_builder import ( + LanguageContainerBuilder, + find_path_backwards +) + +LANGUAGE_ALIAS = "PYTHON3_AAF" +SLC_NAME = "exasol_advanced_analytics_framework_container" +SLC_FILE_NAME = SLC_NAME + ".tar.gz" +SLC_URL_FORMATTER = "https://github.com/exasol/advanced_analytics_framework/releases/download/{version}/" + SLC_NAME + + +@contextmanager +def custom_slc_builder() -> LanguageContainerBuilder: + project_directory = find_path_backwards("pyproject.toml", __file__).parent + with LanguageContainerBuilder(SLC_NAME) as builder: + builder.prepare_flavor(project_directory) + yield builder diff --git a/exasol_data_science_utils_python/schema/column_name.py b/exasol_data_science_utils_python/schema/column_name.py index 82bb8776..36982031 100644 --- a/exasol_data_science_utils_python/schema/column_name.py +++ b/exasol_data_science_utils_python/schema/column_name.py @@ -9,7 +9,7 @@ class ColumnName(ExasolIdentifierImpl): @typechecked - def __init__(self, name: str, table_like_name: TableLikeName = None): + def __init__(self, name: str, table_like_name: TableLikeName|None = None): super().__init__(name) self._table_like_name = table_like_name diff --git a/exasol_data_science_utils_python/utils/data_classes_runtime_type_check.py b/exasol_data_science_utils_python/utils/data_classes_runtime_type_check.py index c7c20479..9fdd1b28 100644 --- a/exasol_data_science_utils_python/utils/data_classes_runtime_type_check.py +++ b/exasol_data_science_utils_python/utils/data_classes_runtime_type_check.py @@ -1,10 +1,12 @@ from dataclasses import fields import typeguard - +from typeguard import TypeCheckError def check_dataclass_types(datacls): for field in fields(datacls): - typeguard.check_type(value=datacls.__dict__[field.name], - expected_type=field.type, - argname=field.name) + try: + typeguard.check_type(value=datacls.__dict__[field.name], + expected_type=field.type) + except TypeCheckError as e: + raise TypeCheckError(f"Field '{field.name}' has wrong type: {e}") diff --git a/noxfile.py b/noxfile.py index 9adc5a50..fb629138 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,6 +1,8 @@ import json import os from pathlib import Path +from exasol_advanced_analytics_framework.slc import custom_slc_builder +from datetime import datetime import nox from nox import Session @@ -108,11 +110,17 @@ def start_integration_test_environment(session: Session): @nox.session(python=False) def build_language_container(session: Session): - script_path = ROOT_DIR / "build_language_container.sh" - session.run(str(script_path)) + export_path = ROOT_DIR / ".slc" + with custom_slc_builder() as builder: + builder.export(export_path) @nox.session(python=False) def run_python_integration_tests_with_db(session: Session): integration_test_directory = INTEGRATION_TEST_DIRECTORY / "with_db" - _run_in_dev_env_poetry_call(session, "pytest", str(integration_test_directory)) + _run_in_dev_env_poetry_call( + session, + "pytest", + str(integration_test_directory), + *session.posargs, + ) diff --git a/poetry.lock b/poetry.lock index 1bb62819..5f7c5d6e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,6 +11,28 @@ files = [ {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] 
+[[package]] +name = "anyio" +version = "4.6.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +files = [ + {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, + {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] + [[package]] name = "argcomplete" version = "3.5.0" @@ -27,18 +49,37 @@ test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] [[package]] name = "astroid" -version = "3.2.4" +version = "3.3.4" description = "An abstract syntax tree for Python with inference support." optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" files = [ - {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, - {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, + {file = "astroid-3.3.4-py3-none-any.whl", hash = "sha256:5eba185467253501b62a9f113c263524b4f5d55e1b30456370eed4cdbd6438fd"}, + {file = "astroid-3.3.4.tar.gz", hash = "sha256:e73d0b62dd680a7c07cb2cd0ce3c22570b044dd01bd994bc3a2dd16c6cbba162"}, ] [package.dependencies] typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} +[[package]] +name = "attrs" +version = "24.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, +] + +[package.extras] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] + [[package]] name = "babel" version = "2.16.0" @@ -667,19 +708,22 @@ vectorstore-mmr = ["numpy (>=1)", "simsimd (>=3)"] [[package]] name = "exasol-bucketfs" -version = "0.8.0" +version = "0.13.0" description = "BucketFS utilities for the Python programming language" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = 
"exasol_bucketfs-0.8.0-py3-none-any.whl", hash = "sha256:3bdee82faded20f65b559ba154d2f17812c62af8d0642ff0dac8ab4d996ba0e0"}, - {file = "exasol_bucketfs-0.8.0.tar.gz", hash = "sha256:924a757bc6608fe29a9bbe6ddd4240583410669f8958f9859df874f7e57b4bc9"}, + {file = "exasol_bucketfs-0.13.0-py3-none-any.whl", hash = "sha256:df661fe184c61c4b89289c4aa88ffed0f8de74f5f934ff2dc1ecb19fc892bf04"}, + {file = "exasol_bucketfs-0.13.0.tar.gz", hash = "sha256:55318de6ae18fb87670f61b2a0824204d22ff63c755c65be17c4abcc4b753137"}, ] [package.dependencies] +attrs = ">=23.2.0" +exasol-saas-api = ">=0.3.0" +httpx = ">=0.27.0" joblib = ">=1.0.1" requests = ">=2.24.0" -typeguard = ">=2.11.1,<3.0.0" +typeguard = ">=4.3.0" [[package]] name = "exasol-error-reporting" @@ -694,13 +738,13 @@ files = [ [[package]] name = "exasol-integration-test-docker-environment" -version = "3.1.0" +version = "3.2.0" description = "Integration Test Docker Environment for Exasol" optional = false python-versions = "<4,>=3.8" files = [ - {file = "exasol_integration_test_docker_environment-3.1.0-py3-none-any.whl", hash = "sha256:ce3cf917bd660ef5e1c59281ba0c1ea7c7dc3c43c9454040a6e1a3d90124a1e4"}, - {file = "exasol_integration_test_docker_environment-3.1.0.tar.gz", hash = "sha256:dbcbb967bb3458f74c8764a1cfc4f62ab215b79ee754bb4d17d4ef54c74f7f0a"}, + {file = "exasol_integration_test_docker_environment-3.2.0-py3-none-any.whl", hash = "sha256:b0fc41a70b73ec5ad43171e2c8fcf76a54a1eb31befa09e6a3214af55f1c93fc"}, + {file = "exasol_integration_test_docker_environment-3.2.0.tar.gz", hash = "sha256:47b11dde66be0149d54cfa6d7eebc4665bd996fccb22f64ce6489a07cec7b331"}, ] [package.dependencies] @@ -724,6 +768,64 @@ requests = ">=2.21.0" simplejson = ">=3.16.0" "stopwatch.py" = ">=1.0.0" +[[package]] +name = "exasol-python-extension-common" +version = "0.6.0" +description = "A collection of common utilities for Exasol extensions." 
+optional = false +python-versions = "<4.0.0,>=3.10.0" +files = [ + {file = "exasol_python_extension_common-0.6.0-py3-none-any.whl", hash = "sha256:434c01936ab67ab00b1fc8f218d2eb90c415a93e9cef9fb9910fdbde6e02bc81"}, + {file = "exasol_python_extension_common-0.6.0.tar.gz", hash = "sha256:c121153f6605601e22ddcd3f9afa9a24cb4f05c1de030c7871a594a419f17575"}, +] + +[package.dependencies] +click = ">=8.1.7,<9.0.0" +exasol-bucketfs = ">=0.10.0" +exasol-saas-api = ">=0.7.0,<1.0.0" +exasol-script-languages-container-tool = ">=1.0.0,<2.0.0" +pyexasol = ">=0.25.0,<1.0.0" +requests = ">=2.32.0" +tenacity = ">=8.3.0,<9.0.0" + +[[package]] +name = "exasol-saas-api" +version = "0.10.0" +description = "API enabling Python applications connecting to Exasol database SaaS instances and using their SaaS services" +optional = false +python-versions = "<4.0.0,>=3.10.0" +files = [ + {file = "exasol_saas_api-0.10.0-py3-none-any.whl", hash = "sha256:13ad9f62e37e95e0359da875d44369c38c511b72b374cdc46bfffe0de2d3f730"}, + {file = "exasol_saas_api-0.10.0.tar.gz", hash = "sha256:8e7febf0c1eb777c38f5631cfb8d45c82a63b463a1e43134117c7fde508a8504"}, +] + +[package.dependencies] +attrs = ">=21.3.0" +httpx = ">=0.20.0" +ifaddr = ">=0.2.0,<0.3.0" +python-dateutil = ">=2.8.0,<3.0.0" +requests = ">=2.31.0,<3.0.0" +tenacity = ">=8.2.3,<9.0.0" +types-requests = ">=2.31.0.6,<3.0.0.0" + +[[package]] +name = "exasol-script-languages-container-tool" +version = "1.0.0" +description = "Script Languages Container Tool" +optional = false +python-versions = "<4,>=3.10" +files = [ + {file = "exasol_script_languages_container_tool-1.0.0-py3-none-any.whl", hash = "sha256:8b87243782266f55b1bd6c153275cba1f55a8406bca99c2d4d37934b3e8bcad4"}, + {file = "exasol_script_languages_container_tool-1.0.0.tar.gz", hash = "sha256:39f0da9e8a55bc0fec1477f5d6c03761ef0abbd841bfc5909253e12e1ae0bcc1"}, +] + +[package.dependencies] +exasol-bucketfs = ">=0.13.0,<0.14.0" +exasol-integration-test-docker-environment = ">=3.1.0,<4.0.0" +importlib_metadata = ">=4.6.0" +importlib-resources = ">=6.4.0" +networkx = ">=3.3.0,<4.0.0" + [[package]] name = "exasol-toolbox" version = "0.14.0" @@ -808,13 +910,13 @@ pytest = ["pytest (>=7)"] [[package]] name = "faker" -version = "28.4.1" +version = "29.0.0" description = "Faker is a Python package that generates fake data for you." optional = false python-versions = ">=3.8" files = [ - {file = "Faker-28.4.1-py3-none-any.whl", hash = "sha256:e59c01d1e8b8e20a83255ab8232c143cb2af3b4f5ab6a3f5ce495f385ad8ab4c"}, - {file = "faker-28.4.1.tar.gz", hash = "sha256:4294d169255a045990720d6f3fa4134b764a4cdf46ef0d3c7553d2506f1adaa1"}, + {file = "Faker-29.0.0-py3-none-any.whl", hash = "sha256:32d0ee7d42925ff06e4a7d906ee7efbf34f5052a41a2a1eb8bb174a422a5498f"}, + {file = "faker-29.0.0.tar.gz", hash = "sha256:34e89aec594cad9773431ca479ee95c7ce03dd9f22fda2524e2373b880a2fa77"}, ] [package.dependencies] @@ -822,18 +924,18 @@ python-dateutil = ">=2.4" [[package]] name = "filelock" -version = "3.16.0" +version = "3.16.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, - {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2)"] [[package]] @@ -885,6 +987,63 @@ gitdb = ">=4.0.1,<5" doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "humanfriendly" version = "10.0" @@ -901,13 +1060,13 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve [[package]] name = "identify" -version = "2.6.0" +version = "2.6.1" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, - {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, + {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, + {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, ] [package.extras] @@ -915,13 +1074,27 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.8" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" files = [ - {file = "idna-3.8-py3-none-any.whl", hash = "sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac"}, - {file = "idna-3.8.tar.gz", hash = "sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "ifaddr" +version = "0.2.0" +description = "Cross-platform network interface and IP address enumeration library" +optional = false +python-versions = "*" +files = [ + {file = "ifaddr-0.2.0-py3-none-any.whl", hash = "sha256:085e0305cfe6f16ab12d72e2024030f5d52674afad6911bb1eee207177b8a748"}, + {file = "ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4"}, ] [[package]] @@ -935,6 +1108,29 @@ files = [ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] +[[package]] +name = "importlib-metadata" +version = "8.5.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx 
(>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + [[package]] name = "importlib-resources" version = "6.4.5" @@ -1443,8 +1639,8 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.1" pytz = ">=2020.1" @@ -1454,13 +1650,13 @@ test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] [[package]] name = "paramiko" -version = "3.4.1" +version = "3.5.0" description = "SSH2 protocol library" optional = false python-versions = ">=3.6" files = [ - {file = "paramiko-3.4.1-py3-none-any.whl", hash = "sha256:8e49fd2f82f84acf7ffd57c64311aa2b30e575370dc23bdb375b10262f7eac32"}, - {file = "paramiko-3.4.1.tar.gz", hash = "sha256:8b15302870af7f6652f2e038975c1d2973f06046cb5d7d65355668b3ecbece0c"}, + {file = "paramiko-3.5.0-py3-none-any.whl", hash = "sha256:1fedf06b085359051cd7d0d270cebe19e755a8a921cc2ddbfa647fb0cd7d68f9"}, + {file = "paramiko-3.5.0.tar.gz", hash = "sha256:ad11e540da4f55cedda52931f1a3f812a8238a7af7f62a60de538cd80bb28124"}, ] [package.dependencies] @@ -1486,13 +1682,13 @@ files = [ [[package]] name = "platformdirs" -version = "4.3.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.3.2-py3-none-any.whl", hash = "sha256:eb1c8582560b34ed4ba105009a4badf7f6f85768b30126f351328507b2beb617"}, - {file = "platformdirs-4.3.2.tar.gz", hash = "sha256:9e5e27a08aa095dd127b9f2e764d74254f482fef22b0970773bfba79d091ab8c"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] @@ -1517,13 +1713,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "polyfactory" -version = "2.16.2" +version = "2.17.0" description = "Mock data generation factories" optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "polyfactory-2.16.2-py3-none-any.whl", hash = "sha256:e5eaf97358fee07d0d8de86a93e81dc56e3be1e1514d145fea6c5f486cda6ea1"}, - {file = "polyfactory-2.16.2.tar.gz", hash = "sha256:6d0d90deb85e5bb1733ea8744c2d44eea2b31656e11b4fa73832d2e2ab5422da"}, + {file = "polyfactory-2.17.0-py3-none-any.whl", hash = "sha256:71b677c17bb7cebad9a5631b1aca7718280bdcedc1c25278253717882d1ac294"}, + {file = "polyfactory-2.17.0.tar.gz", hash = "sha256:099d86f7c79c51a2caaf7c8598cc56e7b0a57c11b5918ddf699e82380735b6b7"}, ] [package.dependencies] @@ -1596,13 +1792,13 @@ pytest-plugin = ["pytest-prysk (>=0.2.0,<0.3.0)"] [[package]] name = "pyasn1" -version = "0.6.0" +version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, - {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, + {file = 
"pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] [[package]] @@ -1696,13 +1892,13 @@ tests = ["chardet", "parameterized", "ruff", "tox", "unittest-parallel"] [[package]] name = "pyexasol" -version = "0.25.2" +version = "0.27.0" description = "Exasol python driver with extra features" optional = false -python-versions = ">=3.6" +python-versions = "<4.0,>=3.9" files = [ - {file = "pyexasol-0.25.2-py3-none-any.whl", hash = "sha256:54be5c75f0867a4838b84b5b5a37466c33fa9b1ca6bf51d9c3d821d367936e6e"}, - {file = "pyexasol-0.25.2.tar.gz", hash = "sha256:3b42cb2c32b7b2ffe7a78c82bf21c3a391043758f2a575c48460252a72386691"}, + {file = "pyexasol-0.27.0-py3-none-any.whl", hash = "sha256:041e1db1ae8f26fa24876056ce8da5830e346fc47734847a95cc960ac056d0c6"}, + {file = "pyexasol-0.27.0.tar.gz", hash = "sha256:795490cd810617bf927e0e90b88347facb5e66f58e0e16378fef5d22fadacd60"}, ] [package.dependencies] @@ -1713,8 +1909,9 @@ websocket-client = ">=1.0.1" [package.extras] examples = ["pproxy"] +numpy = ["numpy (>1.26.0,<2)"] orjson = ["orjson (>=3.6)"] -pandas = ["pandas"] +pandas = ["pandas[numpy] (>=2,<3)"] rapidjson = ["python-rapidjson"] ujson = ["ujson"] @@ -1734,22 +1931,22 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pylint" -version = "3.2.7" +version = "3.3.1" description = "python code static checker" optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" files = [ - {file = "pylint-3.2.7-py3-none-any.whl", hash = "sha256:02f4aedeac91be69fb3b4bea997ce580a4ac68ce58b89eaefeaf06749df73f4b"}, - {file = "pylint-3.2.7.tar.gz", hash = "sha256:1b7a721b575eaeaa7d39db076b6e7743c993ea44f57979127c517c6c572c803e"}, + {file = "pylint-3.3.1-py3-none-any.whl", hash = "sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9"}, + {file = "pylint-3.3.1.tar.gz", hash = "sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"}, ] [package.dependencies] -astroid = ">=3.2.4,<=3.3.0-dev0" +astroid = ">=3.3.4,<=3.4.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, - {version = ">=0.2", markers = "python_version < \"3.11\""}, ] isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" @@ -1821,15 +2018,18 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyreadline3" -version = "3.4.1" +version = "3.5.4" description = "A python implementation of GNU readline." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, - {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, + {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, + {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, ] +[package.extras] +dev = ["build", "flake8", "mypy", "pytest", "twine"] + [[package]] name = "pytest" version = "7.4.4" @@ -1885,6 +2085,55 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] +[[package]] +name = "pytest-exasol-backend" +version = "0.3.0" +description = "" +optional = false +python-versions = "<4,>=3.9" +files = [ + {file = "pytest_exasol_backend-0.3.0-py3-none-any.whl", hash = "sha256:57701d24fa963e08e742938440cdf87840f57d04d096f2cbff0fa0a0f95c75af"}, + {file = "pytest_exasol_backend-0.3.0.tar.gz", hash = "sha256:165a3d47b309a1385d37e4b7908eaba832139e9683d7b30cbaaa7decd1052302"}, +] + +[package.dependencies] +exasol-integration-test-docker-environment = ">=3.1.0,<4.0.0" +exasol-saas-api = ">=0.6.0,<1.0.0" +pytest = ">=7,<9" + +[[package]] +name = "pytest-exasol-extension" +version = "0.1.0" +description = "" +optional = false +python-versions = "<4,>=3.10" +files = [ + {file = "pytest_exasol_extension-0.1.0-py3-none-any.whl", hash = "sha256:f0a20b9d4d7cc01dfc3a7cd6c3d99cee70ced56954a9784b9e007d455da788e8"}, + {file = "pytest_exasol_extension-0.1.0.tar.gz", hash = "sha256:b92f268706efad28c87802c1f546f218349733a37f23f1046520c24352aaf6c4"}, +] + +[package.dependencies] +pyexasol = ">=0.26.0,<1" +pytest = ">=7,<9" +pytest-exasol-backend = ">=0.2.0" + +[[package]] +name = "pytest-exasol-slc" +version = "0.3.0" +description = "" +optional = false +python-versions = "<4,>=3.10" +files = [ + {file = "pytest_exasol_slc-0.3.0-py3-none-any.whl", hash = "sha256:5219eb310390337231e95844aae28bfe243a6b6fff2058f39033b3fd3c651357"}, + {file = "pytest_exasol_slc-0.3.0.tar.gz", hash = "sha256:548705d34135bfefdfddb5a923540bde0b332d12e947a13ca0115f19cc84aa9b"}, +] + +[package.dependencies] +exasol-python-extension-common = ">=0.5.0" +pytest = ">=7,<9" +pytest-exasol-backend = ">=0.3.0" +pytest-exasol-extension = ">=0.1.0" + [[package]] name = "pytest-prysk" version = "0.2.0" @@ -1950,13 +2199,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] @@ -2202,13 +2451,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.8.0" +version = "13.8.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = 
">=3.7.0" files = [ - {file = "rich-13.8.0-py3-none-any.whl", hash = "sha256:2e85306a063b9492dffc86278197a60cbece75bcb766022f3436f567cae11bdc"}, - {file = "rich-13.8.0.tar.gz", hash = "sha256:a5ac1f1cd448ade0d59cc3356f7db7a7ccda2c8cbae9c7a90c28ff463d3e91f4"}, + {file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"}, + {file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"}, ] [package.dependencies] @@ -2234,18 +2483,18 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "74.1.2" +version = "75.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-74.1.2-py3-none-any.whl", hash = "sha256:5f4c08aa4d3ebcb57a50c33b1b07e94315d7fc7230f7115e47fc99776c8ce308"}, - {file = "setuptools-74.1.2.tar.gz", hash = "sha256:95b40ed940a1c67eb70fc099094bd6e99c6ee7c23aa2306f4d2697ba7916f9c6"}, + {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, + {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, ] [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] @@ -2418,6 +2667,17 @@ files = [ {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, ] +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + [[package]] name = "snowballstemmer" version = "2.2.0" @@ -2735,18 +2995,21 @@ files = [ [[package]] name = "typeguard" -version = "2.13.3" +version = "4.3.0" description = "Run-time type checker for Python" optional = false -python-versions = ">=3.5.3" +python-versions = ">=3.8" files = [ - {file = "typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1"}, - {file = "typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4"}, + {file = "typeguard-4.3.0-py3-none-any.whl", hash = "sha256:4d24c5b39a117f8a895b9da7a9b3114f04eb63bade45a4492de49b175b6f7dfa"}, + {file = "typeguard-4.3.0.tar.gz", hash = 
"sha256:92ee6a0aec9135181eae6067ebd617fd9de8d75d714fb548728a4933b1dea651"}, ] +[package.dependencies] +typing-extensions = ">=4.10.0" + [package.extras] -doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["mypy", "pytest", "typing-extensions"] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.3.0)"] +test = ["coverage[toml] (>=7)", "mypy (>=1.2.0)", "pytest (>=7)"] [[package]] name = "typer" @@ -2765,6 +3028,20 @@ rich = ">=10.11.0" shellingham = ">=1.3.0" typing-extensions = ">=3.7.4.3" +[[package]] +name = "types-requests" +version = "2.32.0.20240914" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.32.0.20240914.tar.gz", hash = "sha256:2850e178db3919d9bf809e434eef65ba49d0e7e33ac92d588f4a5e295fffd405"}, + {file = "types_requests-2.32.0.20240914-py3-none-any.whl", hash = "sha256:59c2f673eb55f32a99b2894faf6020e1a9f4a402ad0f192bfee0b64469054310"}, +] + +[package.dependencies] +urllib3 = ">=2" + [[package]] name = "typing-extensions" version = "4.12.2" @@ -2778,13 +3055,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -2795,13 +3072,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.4" +version = "20.26.5" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.4-py3-none-any.whl", hash = "sha256:48f2695d9809277003f30776d155615ffc11328e6a0a8c1f0ec80188d7874a55"}, - {file = "virtualenv-20.26.4.tar.gz", hash = "sha256:c17f4e0f3e6036e9f26700446f85c76ab11df65ff6d8a9cbfad9f71aabfcf23c"}, + {file = "virtualenv-20.26.5-py3-none-any.whl", hash = "sha256:4f3ac17b81fba3ce3bd6f4ead2749a72da5929c01774948e243db9ba41df4ff6"}, + {file = "virtualenv-20.26.5.tar.gz", hash = "sha256:ce489cac131aa58f4b25e321d6d186171f78e6cb13fafbf32a840cee67733ff4"}, ] [package.dependencies] @@ -2908,7 +3185,26 @@ files = [ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] +[[package]] +name = "zipp" +version = "3.20.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", 
"jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + [metadata] lock-version = "2.0" python-versions = ">=3.10,<4" -content-hash = "59b536a6edce713e7a9780e80c63191de77d05744f160988c03e1b7a1227c24d" +content-hash = "601dc4649c8b0a8a0e4d725ffe2d44679cca31c477d96a84c8996cf5edd90fee" diff --git a/pyproject.toml b/pyproject.toml index ea3d8340..520c7d1b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,13 +26,13 @@ jinja2 = "^3.0.3" importlib-resources = "^6.4.0" exasol-bucketfs = ">=0.6.0,<1.0.0" click = "^8.0.4" -pyexasol = "^0.25.0" +pyexasol = ">=0.25.0,<1.0.0" pandas = "^1.1.0" pydantic = "^1.10.2" pyzmq = "^26.0.3" sortedcontainers = "^2.4.0" structlog = "^24.2.0" -typeguard = "<3.0.0" +typeguard = "^4.0.0" nox = "^2024.4.15" [tool.poetry.group.dev.dependencies] @@ -45,6 +45,11 @@ polyfactory = "^2.8.0" pytest-repeat = ">=0.9.1" elasticsearch = "^8.6.1" exasol-toolbox = ">=0.14.0" +exasol-python-extension-common = ">=0.6.0,<1.0.0" +exasol-script-languages-container-tool = "^1.0.0" +pytest-exasol-slc = ">=0.1.0,<1.0.0" +pytest-exasol-backend = ">=0.3.0,<1.0.0" +pytest-exasol-extension = ">=0.1.0,<1.0.0" [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/scripts/install_micromamba.sh b/scripts/install_micromamba.sh index 5ff171d9..559963a8 100755 --- a/scripts/install_micromamba.sh +++ b/scripts/install_micromamba.sh @@ -3,8 +3,8 @@ set -euo pipefail cd $HOME mkdir bin || true -wget -qO- https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba -./bin/micromamba shell init -s bash -p ~/micromamba +wget -qO- https://micro.mamba.pm/api/micromamba/linux-64/2.0.0 | tar -xvj bin/micromamba +./bin/micromamba shell init -s bash --root-prefix ~/micromamba echo 'export PATH="$HOME/bin:$PATH"' >>"$HOME"/.bashrc export PATH="$HOME/bin:$PATH" diff --git a/tests/conftest.py b/tests/conftest.py index 26fb5364..d79194e1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,7 +1,4 @@ pytest_plugins = [ - "tests.integration_tests.with_db.fixtures.database_connection_fixture", - "tests.integration_tests.with_db.fixtures.build_language_container_fixture", - "tests.integration_tests.with_db.fixtures.upload_language_container_fixture", "tests.integration_tests.with_db.fixtures.setup_database_fixture", "tests.unit_tests.query_handler.fixtures" ] diff --git a/tests/deployment/test_language_container_deployer.py b/tests/deployment/test_language_container_deployer.py deleted file mode 100644 index 23f07100..00000000 --- a/tests/deployment/test_language_container_deployer.py +++ /dev/null @@ -1,59 +0,0 @@ -import textwrap -from exasol_bucketfs_utils_python.bucketfs_factory import BucketFSFactory -from exasol_advanced_analytics_framework.deployment.language_container_deployer \ - import LanguageContainerDeployer -from tests.utils.parameters import bucketfs_params -from tests.utils.revert_language_settings import revert_language_settings -from tests.utils.db_queries import DBQueries -from pathlib import Path - - -@revert_language_settings -def _call_deploy_language_container_deployer( - language_alias, schema, db_conn, container_path, language_settings): - db_conn.execute(f"DROP SCHEMA IF EXISTS {schema} CASCADE;") - db_conn.execute(f"CREATE SCHEMA IF NOT EXISTS {schema};") - - # call language container deployer - bucket_fs_factory = BucketFSFactory() - bucketfs_location = bucket_fs_factory.create_bucketfs_location( - url=f"http://{bucketfs_params.host}:{bucketfs_params.port}/" - 
f"{bucketfs_params.bucket}/{bucketfs_params.path_in_bucket};" - f"{bucketfs_params.name}", - user=f"{bucketfs_params.user}", - pwd=f"{bucketfs_params.password}", - base_path=None) - language_container_deployer = LanguageContainerDeployer( - db_conn, language_alias, bucketfs_location, container_path) - language_container_deployer.deploy_container() - - # create a sample UDF using the new language alias - db_conn.execute(textwrap.dedent(f""" - CREATE OR REPLACE {language_alias} SCALAR SCRIPT "TEST_UDF"() - RETURNS BOOLEAN AS - - def run(ctx): - return True - - / - """)) - result = db_conn.execute('SELECT "TEST_UDF"()').fetchall() - return result - - -def test_language_container_deployer( - request, pyexasol_connection, language_container): - schema_name = request.node.name - language_settings = DBQueries.get_language_settings(pyexasol_connection) - - result = _call_deploy_language_container_deployer( - "PYTHON_AAF_DEPLOY_TEST", - schema_name, - pyexasol_connection, - Path(language_container["container_path"]), - language_settings - ) - - assert result[0][0] - - diff --git a/tests/deployment/test_language_container_deployer_cli.py b/tests/deployment/test_language_container_deployer_cli.py deleted file mode 100644 index c804ea51..00000000 --- a/tests/deployment/test_language_container_deployer_cli.py +++ /dev/null @@ -1,72 +0,0 @@ -import textwrap -import pyexasol -from click.testing import CliRunner -from exasol_advanced_analytics_framework import deploy -from tests.utils.parameters import bucketfs_params, db_params -from tests.utils.revert_language_settings import revert_language_settings -from tests.utils.db_queries import DBQueries -from pathlib import Path - - -@revert_language_settings -def _call_deploy_language_container_deployer_cli( - language_alias, schema, db_conn, container_path, language_settings): - db_conn.execute(f"DROP SCHEMA IF EXISTS {schema} CASCADE;") - db_conn.execute(f"CREATE SCHEMA IF NOT EXISTS {schema};") - - # call language container deployer - args_list = [ - "language-container", - "--bucketfs-name", bucketfs_params.name, - "--bucketfs-host", bucketfs_params.host, - "--bucketfs-port", bucketfs_params.port, - "--bucketfs_use-https", False, - "--bucketfs-user", bucketfs_params.user, - "--bucketfs-password", bucketfs_params.password, - "--bucket", bucketfs_params.bucket, - "--path-in-bucket", bucketfs_params.path_in_bucket, - "--container-file", container_path, - "--dsn", db_params.address(), - "--db-user", db_params.user, - "--db-pass", db_params.password, - "--language-alias", language_alias - ] - runner = CliRunner() - result = runner.invoke(deploy.main, args_list) - assert result.exit_code == 0 - - # create a sample UDF using the new language alias - db_conn_test = pyexasol.connect( - dsn=db_params.address(), - user=db_params.user, - password=db_params.password) - db_conn_test.execute(f"OPEN SCHEMA {schema}") - db_conn_test.execute(textwrap.dedent(f""" - CREATE OR REPLACE {language_alias} SCALAR SCRIPT "TEST_UDF"() - RETURNS BOOLEAN AS - - def run(ctx): - return True - - / - """)) - result = db_conn_test.execute('SELECT "TEST_UDF"()').fetchall() - return result - - -def test_language_container_deployer_cli( - request, pyexasol_connection, language_container): - schema_name = request.node.name - language_settings = DBQueries.get_language_settings(pyexasol_connection) - - result = _call_deploy_language_container_deployer_cli( - "PYTHON_AAF_DEPLOY_TEST", - schema_name, - pyexasol_connection, - Path(language_container["container_path"]), - language_settings - ) - - 
assert result[0][0] - - diff --git a/tests/deployment/test_scripts_deployer.py b/tests/deployment/test_scripts_deployer.py index 934f13e9..938a7d24 100644 --- a/tests/deployment/test_scripts_deployer.py +++ b/tests/deployment/test_scripts_deployer.py @@ -1,23 +1,18 @@ from exasol_advanced_analytics_framework.deployment.scripts_deployer import \ ScriptsDeployer +from exasol_advanced_analytics_framework.deployment.aaf_exasol_lua_script_generator import \ + save_aaf_query_loop_lua_script from tests.utils.db_queries import DBQueries -from tests.utils.parameters import db_params -def test_scripts_deployer(upload_language_container, - pyexasol_connection, request): - +def test_scripts_deployer(deployed_slc, language_alias, pyexasol_connection, request): schema_name = request.node.name pyexasol_connection.execute(f"DROP SCHEMA IF EXISTS {schema_name} CASCADE;") - - language_alias = upload_language_container - ScriptsDeployer.run( - dsn=db_params.address(), - user=db_params.user, - password=db_params.password, - schema=schema_name, - language_alias=language_alias, - develop=True - ) + save_aaf_query_loop_lua_script() + ScriptsDeployer( + language_alias, + schema_name, + pyexasol_connection, + ).deploy_scripts() assert DBQueries.check_all_scripts_deployed( pyexasol_connection, schema_name) diff --git a/tests/deployment/test_scripts_deployer_cli.py b/tests/deployment/test_scripts_deployer_cli.py index 52ca388c..fdfd0160 100644 --- a/tests/deployment/test_scripts_deployer_cli.py +++ b/tests/deployment/test_scripts_deployer_cli.py @@ -1,22 +1,24 @@ from click.testing import CliRunner from exasol_advanced_analytics_framework import deploy from tests.utils.db_queries import DBQueries -from tests.utils.parameters import db_params +from exasol_advanced_analytics_framework.slc import LANGUAGE_ALIAS def test_scripts_deployer_cli(upload_language_container, + backend_aware_database_params, pyexasol_connection, request): schema_name = request.node.name pyexasol_connection.execute(f"DROP SCHEMA IF EXISTS {schema_name} CASCADE;") - - language_alias = "PYTHON3_AAF" + dsn = backend_aware_database_params["dsn"] + user = backend_aware_database_params["user"] + password = backend_aware_database_params["password"] args_list = [ "scripts", - "--dsn", db_params.address(), - "--user", db_params.user, - "--pass", db_params.password, + "--dsn", dsn, + "--user", user, + "--pass", password, "--schema", schema_name, - "--language-alias", language_alias + "--language-alias", LANGUAGE_ALIAS ] runner = CliRunner() result = runner.invoke(deploy.main, args_list) diff --git a/tests/integration_tests/with_db/conftest.py b/tests/integration_tests/with_db/conftest.py new file mode 100644 index 00000000..042be1bb --- /dev/null +++ b/tests/integration_tests/with_db/conftest.py @@ -0,0 +1,36 @@ +import pytest + +from exasol_advanced_analytics_framework.slc import ( + custom_slc_builder, + LANGUAGE_ALIAS, +) +from exasol.python_extension_common.deployment.language_container_builder import ( + find_path_backwards, + LanguageContainerBuilder, +) + + +@pytest.fixture(scope="session") +def language_alias(): + return LANGUAGE_ALIAS + + +@pytest.fixture(scope="session") +def slc_builder(use_onprem, use_saas) -> LanguageContainerBuilder: + """ + Overrides default definition from pytest-exasol-slc. + + This slc_builder uses the default builder of the AAF, defined in + exasol_advanced_analytics_framework.slc.custom_slc_builder and adds + another wheel and its pip requirements on top.
+ + As a result, the SLC will contain the AAF and the subproject from the directory + tests/test_package. + """ + if use_saas or use_onprem: + test_package = find_path_backwards("tests/test_package", __file__) + with custom_slc_builder() as builder: + builder.prepare_flavor(test_package) + yield builder + else: + yield None diff --git a/tests/integration_tests/with_db/fixtures/build_language_container_fixture.py b/tests/integration_tests/with_db/fixtures/build_language_container_fixture.py deleted file mode 100644 index ad70ed7e..00000000 --- a/tests/integration_tests/with_db/fixtures/build_language_container_fixture.py +++ /dev/null @@ -1,42 +0,0 @@ -import subprocess -from pathlib import Path -import pytest - - -def find_script(script_name: str) -> Path: - current_path = Path(__file__).parent - script_path = None - while current_path != current_path.root: - script_path = Path(current_path, script_name) - if script_path.exists(): - break - current_path = current_path.parent - if script_path.exists(): - return script_path - else: - raise RuntimeError(f"Could not find {script_name}") - - -@pytest.fixture(scope="session") -def language_container() -> dict: - script_dir = find_script("build_language_container.sh") - completed_process = subprocess.run([script_dir], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - output = completed_process.stdout.decode("UTF-8") - print(output) - - completed_process.check_returncode() - lines = output.splitlines() - alter_session_selector = "ALTER SESSION SET SCRIPT_LANGUAGES='" - alter_session = [line for line in lines - if line.startswith(alter_session_selector)][0] - alter_session = alter_session[len(alter_session_selector):-2] - - container_path_selector = "Cached container under " - container_path = [line for line in lines - if line.startswith(container_path_selector)][0] - container_path = container_path[len(container_path_selector):] - - return {"container_path": container_path, - "alter_session": alter_session} diff --git a/tests/integration_tests/with_db/fixtures/database_connection_fixture.py b/tests/integration_tests/with_db/fixtures/database_connection_fixture.py deleted file mode 100644 index 317ab19b..00000000 --- a/tests/integration_tests/with_db/fixtures/database_connection_fixture.py +++ /dev/null @@ -1,13 +0,0 @@ -import pytest -import pyexasol -from tests.utils.parameters import db_params - - -@pytest.fixture(scope="session") -def pyexasol_connection() -> pyexasol.ExaConnection: - conn = pyexasol.connect( - dsn=db_params.address(), - user=db_params.user, - password=db_params.password) - return conn - diff --git a/tests/integration_tests/with_db/fixtures/setup_database_fixture.py b/tests/integration_tests/with_db/fixtures/setup_database_fixture.py index f1b1a193..301c7789 100644 --- a/tests/integration_tests/with_db/fixtures/setup_database_fixture.py +++ b/tests/integration_tests/with_db/fixtures/setup_database_fixture.py @@ -1,43 +1,79 @@ import pytest -from typing import Tuple -from exasol_advanced_analytics_framework.deployment.scripts_deployer import \ - ScriptsDeployer -from tests.utils.parameters import db_params, bucketfs_params +import pyexasol +from typing import Any, Tuple, Callable +from exasol_advanced_analytics_framework.deployment.scripts_deployer import ScriptsDeployer +from exasol_advanced_analytics_framework.deployment.aaf_exasol_lua_script_generator import \ + save_aaf_query_loop_lua_script -bucketfs_connection_name = "TEST_AAF_BFS_CONN" -schema_name = "TEST_INTEGRATION" -language_alias = "PYTHON3_AAF" +BUCKETFS_CONNECTION_NAME
= "TEST_AAF_BFS_CONN" -def _create_schema(db_conn) -> None: - db_conn.execute(f"DROP SCHEMA IF EXISTS {schema_name} CASCADE;") - db_conn.execute(f"CREATE SCHEMA IF NOT EXISTS {schema_name};") +@pytest.fixture(scope="session") +def db_schema_name() -> str: + """ + Overrides default fixture from pytest-exasol-extension. + """ + return "TEST_INTEGRATION" -def _deploy_scripts() -> None: - ScriptsDeployer.run( - dsn=db_params.address(), - user=db_params.user, - password=db_params.password, - schema=schema_name, - language_alias=language_alias, - develop=True) +@pytest.fixture(scope="module") +def deployed_scripts(pyexasol_connection, db_schema_name, language_alias) -> None: + save_aaf_query_loop_lua_script() + ScriptsDeployer( + language_alias, + db_schema_name, + pyexasol_connection, + ).deploy_scripts() + + +# Can be removed after +# https://github.com/exasol/advanced-analytics-framework/issues/176 +def _bucket_address( + bucketfs_params: dict[str, Any], + path_in_bucket: str = "my-folder", +) -> str: + url = bucketfs_params["url"] + bucket_name = bucketfs_params["bucket_name"] + service_name = bucketfs_params["service_name"] + return ( f"{url}/{bucket_name}/" + f"{path_in_bucket};{service_name}" ) -def _create_bucketfs_connection(db_conn) -> None: - query = "CREATE OR REPLACE CONNECTION {name} TO '{uri}' " \ - "USER '{user}' IDENTIFIED BY '{pwd}'".format( - name=bucketfs_connection_name, - uri=bucketfs_params.address(bucketfs_params.real_port), - user=bucketfs_params.user, - pwd=bucketfs_params.password) - db_conn.execute(query) +# Can be removed after +# https://github.com/exasol/advanced-analytics-framework/issues/176 +@pytest.fixture(scope='session') +def my_bucketfs_connection_factory( + use_onprem, + pyexasol_connection, + backend_aware_bucketfs_params, +) -> Callable[[str, str|None], None]: + def create(name, path_in_bucket): + if not use_onprem: + return + bucketfs_params = backend_aware_bucketfs_params + uri = _bucket_address(bucketfs_params, path_in_bucket) + user = bucketfs_params["username"] + pwd = bucketfs_params["password"] + pyexasol_connection.execute( + f"CREATE OR REPLACE CONNECTION {name} TO '{uri}' " \ + f"USER '{user}' IDENTIFIED BY '{pwd}'" + ) + return create @pytest.fixture(scope="module") -def setup_database(pyexasol_connection) -> Tuple[str, str]: - _create_schema(pyexasol_connection) - _deploy_scripts() - _create_bucketfs_connection(pyexasol_connection) - return bucketfs_connection_name, schema_name +def database_with_slc( + pyexasol_connection, + deployed_scripts, + db_schema_name, + bucketfs_connection_factory, + my_bucketfs_connection_factory, + deployed_slc, +) -> Tuple[str|None, str]: + # this requires updating query_handler_runner_udf.py to the new bucketfs API, first, + # which is planned to be done in ticket + # https://github.com/exasol/advanced-analytics-framework/issues/176 + # bucketfs_connection_factory(BUCKETFS_CONNECTION_NAME, "my-folder") + my_bucketfs_connection_factory(BUCKETFS_CONNECTION_NAME, "my-folder") + return BUCKETFS_CONNECTION_NAME, db_schema_name diff --git a/tests/integration_tests/with_db/fixtures/upload_language_container_fixture.py b/tests/integration_tests/with_db/fixtures/upload_language_container_fixture.py deleted file mode 100644 index 649670cf..00000000 --- a/tests/integration_tests/with_db/fixtures/upload_language_container_fixture.py +++ /dev/null @@ -1,37 +0,0 @@ -import pytest -from pathlib import Path - -from exasol_bucketfs_utils_python.bucketfs_factory import BucketFSFactory - -from tests.utils.parameters import 
bucketfs_params - - -@pytest.fixture(scope="session") -def upload_language_container(pyexasol_connection, language_container) -> str: - bucket_fs_factory = BucketFSFactory() - container_bucketfs_location = \ - bucket_fs_factory.create_bucketfs_location( - url=bucketfs_params.address(), - user=bucketfs_params.user, - pwd=bucketfs_params.password, - base_path=None) - container_path = Path(language_container["container_path"]) - alter_session = language_container["alter_session"] - language_alias = alter_session.split("=")[0] - with open(container_path, "rb") as container_file: - container_bucketfs_location.upload_fileobj_to_bucketfs( - container_file, - "exasol_advanced_analytics_framework_container.tar.gz") - - result = pyexasol_connection.execute( - f"""SELECT "SYSTEM_VALUE" FROM SYS.EXA_PARAMETERS WHERE - PARAMETER_NAME='SCRIPT_LANGUAGES'""").fetchall() - original_alter_system = result[0][0] - pyexasol_connection.execute( - f"ALTER SESSION SET SCRIPT_LANGUAGES='{alter_session}'") - pyexasol_connection.execute( - f"ALTER SYSTEM SET SCRIPT_LANGUAGES='{alter_session}'") - - yield language_alias - pyexasol_connection.execute( - f"ALTER SYSTEM SET SCRIPT_LANGUAGES='{original_alter_system}'") diff --git a/tests/integration_tests/with_db/test_query_loop_integration.py b/tests/integration_tests/with_db/test_query_loop_integration.py index 7e427cd9..3a41e300 100644 --- a/tests/integration_tests/with_db/test_query_loop_integration.py +++ b/tests/integration_tests/with_db/test_query_loop_integration.py @@ -7,11 +7,10 @@ from tests.test_package.test_query_handlers.query_handler_test import \ FINAL_RESULT, QUERY_LIST, TEST_INPUT -from tests.utils.parameters import db_params QUERY_FLUSH_STATS = """FLUSH STATISTICS""" QUERY_AUDIT_LOGS = """ -SELECT SQL_TEXT +SELECT SQL_TEXT FROM EXA_STATISTICS.EXA_DBA_AUDIT_SQL WHERE SESSION_ID = CURRENT_SESSION ORDER BY START_TIME DESC; @@ -20,8 +19,8 @@ def test_query_loop_integration_with_one_iteration( - setup_database, pyexasol_connection, upload_language_container): - bucketfs_connection_name, schema_name = setup_database + database_with_slc, pyexasol_connection): + bucketfs_connection_name, schema_name = database_with_slc args = json.dumps( { "query_handler": { @@ -51,16 +50,13 @@ def test_query_loop_integration_with_one_iteration( def test_query_loop_integration_with_one_iteration_with_not_released_child_query_handler_context( - setup_database, upload_language_container): + database_with_slc, backend_aware_database_params): # start a new db session, to isolate the EXECUTE SCRIPT and the QueryHandler queries # into its own session, for easier retrieval - conn = pyexasol.connect( - dsn=db_params.address(), - user=db_params.user, - password=db_params.password) + conn = pyexasol.connect(**backend_aware_database_params) # execute query loop - bucketfs_connection_name, schema_name = setup_database + bucketfs_connection_name, schema_name = database_with_slc args = json.dumps( { "query_handler": { @@ -90,16 +86,13 @@ def test_query_loop_integration_with_one_iteration_with_not_released_child_query def test_query_loop_integration_with_one_iteration_with_not_released_temporary_object( - setup_database, upload_language_container): + database_with_slc, backend_aware_database_params): # start a new db session, to isolate the EXECUTE SCRIPT and the QueryHandler queries # into its own session, for easier retrieval of the audit log - conn = pyexasol.connect( - dsn=db_params.address(), - user=db_params.user, - password=db_params.password) + conn = 
pyexasol.connect(**backend_aware_database_params) # execute query loop - bucketfs_connection_name, schema_name = setup_database + bucketfs_connection_name, schema_name = database_with_slc args = json.dumps( { "query_handler": { @@ -140,16 +133,13 @@ def test_query_loop_integration_with_one_iteration_with_not_released_temporary_o def test_query_loop_integration_with_two_iteration( - setup_database, upload_language_container): + database_with_slc, backend_aware_database_params): # start a new db session, to isolate the EXECUTE SCRIPT and the QueryHandler queries # into its own session, for easier retrieval of the audit log - conn = pyexasol.connect( - dsn=db_params.address(), - user=db_params.user, - password=db_params.password) + conn = pyexasol.connect(**backend_aware_database_params) # execute query loop - bucketfs_connection_name, schema_name = setup_database + bucketfs_connection_name, schema_name = database_with_slc args = json.dumps( { "query_handler": { diff --git a/tests/test_package/poetry.lock b/tests/test_package/poetry.lock index f41588b5..c0407bc2 100644 --- a/tests/test_package/poetry.lock +++ b/tests/test_package/poetry.lock @@ -1,8 +1,7 @@ +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. package = [] [metadata] -lock-version = "1.1" -python-versions = "^3.8" -content-hash = "fafb334cb038533f851c23d0b63254223abf72ce4f02987e7064b0c95566699a" - -[metadata.files] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "17ca553b0bb9298a6ed528dd21e544ca433179192dba32a9920168e1c199d74f" diff --git a/tests/unit_tests/data_science_utils/schema/test_column.py b/tests/unit_tests/data_science_utils/schema/test_column.py index 1d39d6e0..c9a5077b 100644 --- a/tests/unit_tests/data_science_utils/schema/test_column.py +++ b/tests/unit_tests/data_science_utils/schema/test_column.py @@ -3,6 +3,7 @@ from exasol_data_science_utils_python.schema.column import Column from exasol_data_science_utils_python.schema.column import ColumnType from exasol_data_science_utils_python.schema.column_name_builder import ColumnNameBuilder +from typeguard import TypeCheckError def test_set_new_type_fail(): @@ -18,7 +19,7 @@ def test_set_new_name_fail(): def test_wrong_types_in_constructor(): - with pytest.raises(TypeError) as c: + with pytest.raises(TypeCheckError) as c: column = Column("abc", "INTEGER") def test_equality(): diff --git a/tests/unit_tests/data_science_utils/schema/test_column_builder.py b/tests/unit_tests/data_science_utils/schema/test_column_builder.py index 9bfef5ba..79fe821e 100644 --- a/tests/unit_tests/data_science_utils/schema/test_column_builder.py +++ b/tests/unit_tests/data_science_utils/schema/test_column_builder.py @@ -3,15 +3,15 @@ from exasol_data_science_utils_python.schema.column import ColumnType from exasol_data_science_utils_python.schema.column_builder import ColumnBuilder from exasol_data_science_utils_python.schema.column_name_builder import ColumnNameBuilder - +from typeguard import TypeCheckError def test_create_column_with_name_only(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): column = ColumnBuilder().with_name(ColumnNameBuilder.create("column")).build() def test_create_column_with_type_only(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): column = ColumnBuilder().with_type(type=ColumnType("INTEGER")).build() diff --git a/tests/unit_tests/data_science_utils/schema/test_column_name_builder.py b/tests/unit_tests/data_science_utils/schema/test_column_name_builder.py index 
75d139fc..77ff4704 100644 --- a/tests/unit_tests/data_science_utils/schema/test_column_name_builder.py +++ b/tests/unit_tests/data_science_utils/schema/test_column_name_builder.py @@ -3,10 +3,11 @@ from exasol_data_science_utils_python.schema.column_name import ColumnName from exasol_data_science_utils_python.schema.column_name_builder import ColumnNameBuilder from exasol_data_science_utils_python.schema.table_name_impl import TableNameImpl +from typeguard import TypeCheckError def test_using_empty_constructor(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError) as ex: column_name = ColumnNameBuilder().build() diff --git a/tests/unit_tests/data_science_utils/schema/test_column_type.py b/tests/unit_tests/data_science_utils/schema/test_column_type.py index 3ab79a5c..bdbf0dad 100644 --- a/tests/unit_tests/data_science_utils/schema/test_column_type.py +++ b/tests/unit_tests/data_science_utils/schema/test_column_type.py @@ -5,6 +5,7 @@ from exasol_data_science_utils_python.schema.schema_name import SchemaName from exasol_data_science_utils_python.schema.table_name_builder import TableNameBuilder from exasol_data_science_utils_python.schema.table_name_impl import TableNameImpl +from typeguard import TypeCheckError def test_correct_types(): @@ -28,42 +29,42 @@ def test_name_missing(): def test_name_wrong_type(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): ColumnType(name=1) def test_precision_wrong_type(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): ColumnType(name="Test", precision="") def test_scale_wrong_type(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): ColumnType(name="Test", scale="") def test_size_wrong_type(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): ColumnType(name="Test", size="") def test_characterSet_wrong_type(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): ColumnType(name="Test", characterSet=1) def test_withLocalTimeZone_wrong_type(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): ColumnType(name="Test", withLocalTimeZone=1) def test_fraction_wrong_type(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): ColumnType(name="Test", fraction="") def test_srid_wrong_type(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): ColumnType(name="Test", fraction="") diff --git a/tests/unit_tests/data_science_utils/schema/test_identifier.py b/tests/unit_tests/data_science_utils/schema/test_identifier.py index 956dd0b7..31a9c50c 100644 --- a/tests/unit_tests/data_science_utils/schema/test_identifier.py +++ b/tests/unit_tests/data_science_utils/schema/test_identifier.py @@ -5,6 +5,7 @@ class TestSchemaElement(ExasolIdentifierImpl): + __test__ = False def __init__(self, name: str): super().__init__(name) diff --git a/tests/unit_tests/data_science_utils/schema/test_table.py b/tests/unit_tests/data_science_utils/schema/test_table.py index 8a9654d4..8ca4fe95 100644 --- a/tests/unit_tests/data_science_utils/schema/test_table.py +++ b/tests/unit_tests/data_science_utils/schema/test_table.py @@ -5,6 +5,7 @@ from exasol_data_science_utils_python.schema.column_name_builder import ColumnNameBuilder from exasol_data_science_utils_python.schema.table import Table from exasol_data_science_utils_python.schema.table_name_impl import TableNameImpl +from typeguard import TypeCheckError def test_valid(): @@ -40,7 +41,7 @@ def test_set_new_columns_fail(): def test_wrong_types_in_constructor(): - 
with pytest.raises(TypeError) as c: + with pytest.raises(TypeCheckError) as c: column = Table("abc", "INTEGER") diff --git a/tests/unit_tests/data_science_utils/schema/test_table_builder.py b/tests/unit_tests/data_science_utils/schema/test_table_builder.py index 08c6825a..cf1b8620 100644 --- a/tests/unit_tests/data_science_utils/schema/test_table_builder.py +++ b/tests/unit_tests/data_science_utils/schema/test_table_builder.py @@ -5,15 +5,16 @@ from exasol_data_science_utils_python.schema.column_name_builder import ColumnNameBuilder from exasol_data_science_utils_python.schema.table_builder import TableBuilder from exasol_data_science_utils_python.schema.table_name_impl import TableNameImpl +from typeguard import TypeCheckError def test_create_table_with_name_only_fail(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): column = TableBuilder().with_name(TableNameImpl("table")).build() def test_create_table_with_columns_only_fail(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): column = TableBuilder().with_columns([Column(ColumnNameBuilder.create("abc"), ColumnType("INTEGER"))]).build() diff --git a/tests/unit_tests/data_science_utils/schema/test_table_name_builder.py b/tests/unit_tests/data_science_utils/schema/test_table_name_builder.py index 2a428194..e76c0e6b 100644 --- a/tests/unit_tests/data_science_utils/schema/test_table_name_builder.py +++ b/tests/unit_tests/data_science_utils/schema/test_table_name_builder.py @@ -4,10 +4,11 @@ from exasol_data_science_utils_python.schema.table_name import TableName from exasol_data_science_utils_python.schema.table_name_builder import TableNameBuilder from exasol_data_science_utils_python.schema.table_name_impl import TableNameImpl +from typeguard import TypeCheckError def test_using_empty_constructor(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): table_name = TableNameBuilder().build() diff --git a/tests/unit_tests/data_science_utils/schema/test_udf_name_builder.py b/tests/unit_tests/data_science_utils/schema/test_udf_name_builder.py index 3c9a6a00..7fd206f8 100644 --- a/tests/unit_tests/data_science_utils/schema/test_udf_name_builder.py +++ b/tests/unit_tests/data_science_utils/schema/test_udf_name_builder.py @@ -4,10 +4,11 @@ from exasol_data_science_utils_python.schema.udf_name import UDFName from exasol_data_science_utils_python.schema.udf_name_builder import UDFNameBuilder from exasol_data_science_utils_python.schema.udf_name_impl import UDFNameImpl +from typeguard import TypeCheckError def test_using_empty_constructor(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): udf_name = UDFNameBuilder().build() diff --git a/tests/unit_tests/data_science_utils/schema/test_view.py b/tests/unit_tests/data_science_utils/schema/test_view.py index 59f6632f..cfd24e12 100644 --- a/tests/unit_tests/data_science_utils/schema/test_view.py +++ b/tests/unit_tests/data_science_utils/schema/test_view.py @@ -5,6 +5,7 @@ from exasol_data_science_utils_python.schema.column_name_builder import ColumnNameBuilder from exasol_data_science_utils_python.schema.view import View from exasol_data_science_utils_python.schema.view_name_impl import ViewNameImpl +from typeguard import TypeCheckError def test_valid(): @@ -40,7 +41,7 @@ def test_set_new_columns_fail(): def test_wrong_types_in_constructor(): - with pytest.raises(TypeError) as c: + with pytest.raises(TypeCheckError) as c: column = View("abc", "INTEGER") diff --git 
a/tests/unit_tests/data_science_utils/schema/test_view_name_builder.py b/tests/unit_tests/data_science_utils/schema/test_view_name_builder.py index 6526d161..31cc1ef5 100644 --- a/tests/unit_tests/data_science_utils/schema/test_view_name_builder.py +++ b/tests/unit_tests/data_science_utils/schema/test_view_name_builder.py @@ -4,10 +4,11 @@ from exasol_data_science_utils_python.schema.view_name import ViewName from exasol_data_science_utils_python.schema.view_name_builder import ViewNameBuilder from exasol_data_science_utils_python.schema.view_name_impl import ViewNameImpl +from typeguard import TypeCheckError def test_using_empty_constructor(): - with pytest.raises(TypeError): + with pytest.raises(TypeCheckError): view_name = ViewNameBuilder().build() diff --git a/tests/unit_tests/query_handler/test_query_handler_interface.py b/tests/unit_tests/query_handler/test_query_handler_interface.py index 8da4d9df..874b94a9 100644 --- a/tests/unit_tests/query_handler/test_query_handler_interface.py +++ b/tests/unit_tests/query_handler/test_query_handler_interface.py @@ -17,6 +17,7 @@ class TestQueryHandler(QueryHandler[Dict[str, Any], int]): + __test__ = False def __init__(self, parameter: Dict[str, Any], query_handler_context: ScopeQueryHandlerContext): super().__init__(parameter, query_handler_context) diff --git a/tests/unit_tests/query_handler_runner/test_mock_query_handler_runner.py b/tests/unit_tests/query_handler_runner/test_mock_query_handler_runner.py index 98ccda05..14698bb4 100644 --- a/tests/unit_tests/query_handler_runner/test_mock_query_handler_runner.py +++ b/tests/unit_tests/query_handler_runner/test_mock_query_handler_runner.py @@ -46,6 +46,7 @@ class TestInput: class TestOutput: + __test__ = False def __init__(self, test_input: TestInput): self.test_input = test_input diff --git a/tests/unit_tests/udf_communication/peer_communication/background_thread/connection_closer/test_connection_closer.py b/tests/unit_tests/udf_communication/peer_communication/background_thread/connection_closer/test_connection_closer.py index f820cb1c..017b1eb1 100644 --- a/tests/unit_tests/udf_communication/peer_communication/background_thread/connection_closer/test_connection_closer.py +++ b/tests/unit_tests/udf_communication/peer_communication/background_thread/connection_closer/test_connection_closer.py @@ -20,6 +20,7 @@ @dataclasses.dataclass() class TestSetup: + __test__ = False peer: Peer my_connection_info: ConnectionInfo sender_mock: Union[MagicMock, Sender] diff --git a/tests/unit_tests/udf_communication/peer_communication/background_thread/connection_closer/test_connection_closer_builder.py b/tests/unit_tests/udf_communication/peer_communication/background_thread/connection_closer/test_connection_closer_builder.py index c56d6af0..a67f4a60 100644 --- a/tests/unit_tests/udf_communication/peer_communication/background_thread/connection_closer/test_connection_closer_builder.py +++ b/tests/unit_tests/udf_communication/peer_communication/background_thread/connection_closer/test_connection_closer_builder.py @@ -27,6 +27,7 @@ @dataclasses.dataclass() class TestSetup: + __test__ = False peer: Peer my_connection_info: ConnectionInfo out_control_socket_mock: Union[MagicMock, Socket] diff --git a/tests/unit_tests/udf_communication/peer_communication/background_thread/connection_closer/test_connection_is_closed_sender.py b/tests/unit_tests/udf_communication/peer_communication/background_thread/connection_closer/test_connection_is_closed_sender.py index 5f4179a7..784ca943 100644 --- 
a/tests/unit_tests/udf_communication/peer_communication/background_thread/connection_closer/test_connection_is_closed_sender.py +++ b/tests/unit_tests/udf_communication/peer_communication/background_thread/connection_closer/test_connection_is_closed_sender.py @@ -18,6 +18,7 @@ @dataclasses.dataclass() class TestSetup: + __test__ = False peer: Peer timer_mock: Union[MagicMock, Timer] out_control_socket_mock: Union[MagicMock, Socket] diff --git a/tests/unit_tests/udf_communication/peer_communication/test_abort_timeout_sender.py b/tests/unit_tests/udf_communication/peer_communication/test_abort_timeout_sender.py index e58bd181..9af3e29c 100644 --- a/tests/unit_tests/udf_communication/peer_communication/test_abort_timeout_sender.py +++ b/tests/unit_tests/udf_communication/peer_communication/test_abort_timeout_sender.py @@ -21,6 +21,7 @@ def mock_cast(obj: Any) -> Mock: @dataclasses.dataclass() class TestSetup: + __test__ = False reason: str timer_mock: Union[MagicMock, Timer] out_control_socket_mock: Union[MagicMock, Socket] diff --git a/tests/unit_tests/udf_communication/peer_communication/test_acknowledge_register_peer_sender.py b/tests/unit_tests/udf_communication/peer_communication/test_acknowledge_register_peer_sender.py index 5223618b..91bc1092 100644 --- a/tests/unit_tests/udf_communication/peer_communication/test_acknowledge_register_peer_sender.py +++ b/tests/unit_tests/udf_communication/peer_communication/test_acknowledge_register_peer_sender.py @@ -17,6 +17,7 @@ @dataclasses.dataclass class TestSetup: + __test__ = False my_connection_info: ConnectionInfo peer: Peer register_peer_connection_mock: Union[RegisterPeerConnection, MagicMock] diff --git a/tests/unit_tests/udf_communication/peer_communication/test_background_peer_state.py b/tests/unit_tests/udf_communication/peer_communication/test_background_peer_state.py index e4308d2c..b291382d 100644 --- a/tests/unit_tests/udf_communication/peer_communication/test_background_peer_state.py +++ b/tests/unit_tests/udf_communication/peer_communication/test_background_peer_state.py @@ -26,6 +26,7 @@ @dataclasses.dataclass() class TestSetup: + __test__ = False peer: Peer my_connection_info: ConnectionInfo payload_handler_mock: Union[MagicMock, PayloadHandler] diff --git a/tests/unit_tests/udf_communication/peer_communication/test_connection_establisher.py b/tests/unit_tests/udf_communication/peer_communication/test_connection_establisher.py index 842ec65e..001c87b0 100644 --- a/tests/unit_tests/udf_communication/peer_communication/test_connection_establisher.py +++ b/tests/unit_tests/udf_communication/peer_communication/test_connection_establisher.py @@ -19,6 +19,7 @@ @dataclasses.dataclass() class TestSetup: + __test__ = False peer: Peer my_connection_info: ConnectionInfo sender_mock: Union[MagicMock, Sender] diff --git a/tests/unit_tests/udf_communication/peer_communication/test_connection_establisher_builder.py b/tests/unit_tests/udf_communication/peer_communication/test_connection_establisher_builder.py index 937970f4..4a9218e1 100644 --- a/tests/unit_tests/udf_communication/peer_communication/test_connection_establisher_builder.py +++ b/tests/unit_tests/udf_communication/peer_communication/test_connection_establisher_builder.py @@ -26,6 +26,7 @@ @dataclasses.dataclass() class TestSetup: + __test__ = False peer: Peer my_connection_info: ConnectionInfo out_control_socket_mock: Union[MagicMock, Socket] diff --git a/tests/unit_tests/udf_communication/peer_communication/test_connection_is_ready_sender.py 
b/tests/unit_tests/udf_communication/peer_communication/test_connection_is_ready_sender.py index dd987da8..f7678dc2 100644 --- a/tests/unit_tests/udf_communication/peer_communication/test_connection_is_ready_sender.py +++ b/tests/unit_tests/udf_communication/peer_communication/test_connection_is_ready_sender.py @@ -18,6 +18,7 @@ @dataclasses.dataclass() class TestSetup: + __test__ = False peer: Peer timer_mock: Union[MagicMock, Timer] out_control_socket_mock: Union[MagicMock, Socket] diff --git a/tests/unit_tests/udf_communication/peer_communication/test_payload_handler.py b/tests/unit_tests/udf_communication/peer_communication/test_payload_handler.py index ca03dc89..c69ab3ad 100644 --- a/tests/unit_tests/udf_communication/peer_communication/test_payload_handler.py +++ b/tests/unit_tests/udf_communication/peer_communication/test_payload_handler.py @@ -15,6 +15,7 @@ @dataclasses.dataclass class TestSetup: + __test__ = False payload_sender_mock: Union[MagicMock, PayloadSender] payload_receiver_mock: Union[MagicMock, PayloadReceiver] payload_handler: PayloadHandler diff --git a/tests/unit_tests/udf_communication/peer_communication/test_payload_message_sender.py b/tests/unit_tests/udf_communication/peer_communication/test_payload_message_sender.py index d005f546..c0d1d5bf 100644 --- a/tests/unit_tests/udf_communication/peer_communication/test_payload_message_sender.py +++ b/tests/unit_tests/udf_communication/peer_communication/test_payload_message_sender.py @@ -19,6 +19,7 @@ @dataclasses.dataclass(frozen=True) class TestSetup: + __test__ = False sender_mock: Union[Sender, MagicMock] abort_time_mock: Union[Timer, MagicMock] out_control_socket_mock: Union[Socket, MagicMock] diff --git a/tests/unit_tests/udf_communication/peer_communication/test_payload_receiver.py b/tests/unit_tests/udf_communication/peer_communication/test_payload_receiver.py index 6982d515..d65b29ee 100644 --- a/tests/unit_tests/udf_communication/peer_communication/test_payload_receiver.py +++ b/tests/unit_tests/udf_communication/peer_communication/test_payload_receiver.py @@ -15,6 +15,7 @@ @dataclasses.dataclass class TestSetup: + __test__ = False sender_mock: Union[MagicMock, Sender] out_control_socket_mock: Union[MagicMock, Socket] my_connection_info: ConnectionInfo diff --git a/tests/unit_tests/udf_communication/peer_communication/test_payload_sender.py b/tests/unit_tests/udf_communication/peer_communication/test_payload_sender.py index a029e444..b4f64de9 100644 --- a/tests/unit_tests/udf_communication/peer_communication/test_payload_sender.py +++ b/tests/unit_tests/udf_communication/peer_communication/test_payload_sender.py @@ -22,6 +22,7 @@ @dataclasses.dataclass class TestSetup: + __test__ = False sender_mock: Union[MagicMock, Sender] out_control_socket_mock: Union[MagicMock, Socket] clock_mock: Union[MagicMock, Clock] diff --git a/tests/unit_tests/udf_communication/peer_communication/test_register_peer_forwarder_is_ready_sender.py b/tests/unit_tests/udf_communication/peer_communication/test_register_peer_forwarder_is_ready_sender.py index 5bdddefe..bc4a7193 100644 --- a/tests/unit_tests/udf_communication/peer_communication/test_register_peer_forwarder_is_ready_sender.py +++ b/tests/unit_tests/udf_communication/peer_communication/test_register_peer_forwarder_is_ready_sender.py @@ -20,6 +20,7 @@ @dataclasses.dataclass class TestSetup: + __test__ = False my_connection_info: ConnectionInfo peer: Peer timer_mock: Union[Timer, MagicMock] diff --git 
a/tests/unit_tests/udf_communication/peer_communication/test_register_peer_sender.py b/tests/unit_tests/udf_communication/peer_communication/test_register_peer_sender.py index 43942111..0581cb06 100644 --- a/tests/unit_tests/udf_communication/peer_communication/test_register_peer_sender.py +++ b/tests/unit_tests/udf_communication/peer_communication/test_register_peer_sender.py @@ -17,6 +17,7 @@ @dataclasses.dataclass() class TestSetup: + __test__ = False peer: Peer timer_mock: Union[MagicMock, Timer] register_peer_connection: Union[MagicMock, RegisterPeerConnection] diff --git a/tests/unit_tests/udf_communication/peer_communication/test_synchronize_connection_sender.py b/tests/unit_tests/udf_communication/peer_communication/test_synchronize_connection_sender.py index 35b3c3a0..bf54b25b 100644 --- a/tests/unit_tests/udf_communication/peer_communication/test_synchronize_connection_sender.py +++ b/tests/unit_tests/udf_communication/peer_communication/test_synchronize_connection_sender.py @@ -15,6 +15,7 @@ @dataclasses.dataclass() class TestSetup: + __test__ = False peer: Peer my_connection_info: ConnectionInfo timer_mock: Union[MagicMock, Timer] diff --git a/tests/unit_tests/udf_framework/test_json_udf_query_handler_factory.py b/tests/unit_tests/udf_framework/test_json_udf_query_handler_factory.py index 6c5ed94a..922dce5a 100644 --- a/tests/unit_tests/udf_framework/test_json_udf_query_handler_factory.py +++ b/tests/unit_tests/udf_framework/test_json_udf_query_handler_factory.py @@ -39,6 +39,7 @@ def top_level_query_handler_context(tmp_path, class TestJSONQueryHandler(JSONQueryHandler): + __test__ = False def __init__(self, parameter: JSONType, query_handler_context: ScopeQueryHandlerContext): super().__init__(parameter, query_handler_context) self._parameter = parameter @@ -51,6 +52,7 @@ def handle_query_result(self, query_result: QueryResult) -> Union[Continue, Fini class TestJsonUDFQueryHandlerFactory(JsonUDFQueryHandlerFactory): + __test__ = False def __init__(self): super().__init__(TestJSONQueryHandler) diff --git a/tests/utils/parameters.py b/tests/utils/parameters.py deleted file mode 100644 index 6051c59c..00000000 --- a/tests/utils/parameters.py +++ /dev/null @@ -1,46 +0,0 @@ -from dataclasses import dataclass - - -@dataclass(frozen=True) -class DBParams: - host: str - port: str - user: str - password: str - - def address(self) -> str: - return f"{self.host}:{self.port}" - - -@dataclass(frozen=True) -class BucketFSParams: - host: str - port: str - real_port: str - user: str - password: str - name: str - bucket: str - path_in_bucket: str - - def address(self, port=None) -> str: - port = self.port if not port else port - return f"http://{self.host}:{port}/{self.bucket}/" \ - f"{self.path_in_bucket};{self.name}" - - -db_params = DBParams( - host="127.0.0.1", - port="8888", - user="sys", - password="exasol") - -bucketfs_params = BucketFSParams( - host="127.0.0.1", - port="6666", - real_port="2580", - user="w", - password="write", - name="bfsdefault", - bucket="default", - path_in_bucket="container") diff --git a/tests/utils/revert_language_settings.py b/tests/utils/revert_language_settings.py index 3ca82223..7b0a6f54 100644 --- a/tests/utils/revert_language_settings.py +++ b/tests/utils/revert_language_settings.py @@ -1,26 +1,15 @@ -import pyexasol -import pytest -from tests.utils.parameters import db_params +import contextlib +import pyexasol # type: ignore - -def revert_language_settings(func): - def wrapper(language_alias, schema, db_conn, - container_path, language_settings): - try: 
- return func(language_alias, schema, db_conn, - container_path, language_settings) - except Exception as exc: - print("Exception occurred while running the test: %s" % exc) - raise pytest.fail(exc) - finally: - print("Revert language settings") - db_conn_revert = pyexasol.connect( - dsn=db_params.address(), - user=db_params.user, - password=db_params.password) - db_conn_revert.execute(f"ALTER SYSTEM SET SCRIPT_LANGUAGES=" - f"'{language_settings[0][0]}';") - db_conn_revert.execute(f"ALTER SESSION SET SCRIPT_LANGUAGES=" - f"'{language_settings[0][1]}';") - - return wrapper +@contextlib.contextmanager +def revert_language_settings(connection: pyexasol.ExaConnection): + query = f""" + SELECT "SYSTEM_VALUE", "SESSION_VALUE" + FROM SYS.EXA_PARAMETERS + WHERE PARAMETER_NAME='SCRIPT_LANGUAGES'""" + language_settings = connection.execute(query).fetchall()[0] + try: + yield + finally: + connection.execute(f"ALTER SYSTEM SET SCRIPT_LANGUAGES='{language_settings[0]}';") + connection.execute(f"ALTER SESSION SET SCRIPT_LANGUAGES='{language_settings[1]}';")
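For reference, a minimal usage sketch of the reworked revert_language_settings context manager (assuming a connection built from the backend_aware_database_params fixture used above; the alias and SCRIPT_LANGUAGES value in the ALTER statement are illustrative placeholders, not the project's actual settings):

import pyexasol

from tests.utils.revert_language_settings import revert_language_settings


def test_with_temporary_language_settings(backend_aware_database_params):
    # Use a dedicated connection so the session-level change below does not
    # leak into tests that share a session-scoped connection fixture.
    conn = pyexasol.connect(**backend_aware_database_params)
    with revert_language_settings(conn):
        # The SYSTEM and SESSION values of SCRIPT_LANGUAGES captured on entry
        # are restored on exit, even if an assertion inside the block fails.
        conn.execute(
            "ALTER SESSION SET SCRIPT_LANGUAGES="
            "'MY_ALIAS=localzmq+protobuf:///bfsdefault/default/my_container'"
        )
        # ... run checks that rely on the temporary language settings ...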