diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 683bef8..0f1c51d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,9 +26,16 @@ repos: hooks: - id: black + - repo: https://github.com/pycqa/isort + rev: 5.12.0 + hooks: + - id: isort + args: ["--profile", "black", "--filter-files"] + - repo: https://github.com/asottile/pyupgrade rev: v3.3.1 hooks: - id: pyupgrade args: - "--py38-plus" + diff --git a/CHANGELOG.md b/CHANGELOG.md index be082c3..24eaed0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,7 +8,7 @@ * Documentation interne * Tests unitaires pour la classe RasterSet * Classe Raster : constructeur à partir des paramètres - + * Pyramid * Fonction de calcul de la taille d'une pyramide * Générateur de lecture de la liste du contenu @@ -75,7 +75,7 @@ Lecture par système de fichier virtuel avec GDAL * Utils * Meilleure gestion de reprojection par `reproject_bbox` : on détecte des systèmes identiques en entrée ou quand seul l'ordre des axes changent, pour éviter le calcul * Ajout de la fonction de reprojection d'un point `reproject_point` : on détecte des systèmes identiques en entrée ou quand seul l'ordre des axes changent, pour éviter le calcul - + ### [Changed] * Utils : @@ -98,8 +98,8 @@ Ajout de fonctionnalités de lecture de donnée d'une pyramide et suivi des reco * Storage : * Fonction de lecture binaire, complète ou partielle, d'un fichier ou objet S3 ou CEPH * Exceptions : NotImplementedError permet de préciser qu'une fonctionnalité n'a pas été implémentée pour tous les cas. Ici, on ne gère pas la décompression des données raster pour les compressions packbit et LZW - -* Ajout de la publication PyPI dans la CI GitHub + +* Ajout de la publication PyPI dans la CI GitHub ### [Changed] @@ -181,4 +181,4 @@ Initialisation des librairies Python utilisées par les outils python à venir d * Librairie de gestion d'un descripteur de pyramide * chargement depuis un descripteur ou par clone (avec changement de stockage) * écriture du descripteur -* Tests unitaires couvrant ces librairies \ No newline at end of file +* Tests unitaires couvrant ces librairies diff --git a/pyproject.toml b/pyproject.toml index 4e1599b..f172c36 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,7 @@ doc = [ dev = [ "black", + "isort >= 5.12.0", "pre-commit >3,<4" ] diff --git a/src/rok4/Layer.py b/src/rok4/Layer.py index 316a9ca..74387ae 100644 --- a/src/rok4/Layer.py +++ b/src/rok4/Layer.py @@ -5,16 +5,16 @@ - `Layer` - Descriptor to broadcast pyramids' data """ -from typing import Dict, List, Tuple, Union import json -from json.decoder import JSONDecodeError import os import re +from json.decoder import JSONDecodeError +from typing import Dict, List, Tuple, Union from rok4.Exceptions import * from rok4.Pyramid import Pyramid, PyramidType -from rok4.TileMatrixSet import TileMatrixSet from rok4.Storage import * +from rok4.TileMatrixSet import TileMatrixSet from rok4.Utils import * diff --git a/src/rok4/Pyramid.py b/src/rok4/Pyramid.py index 80b9b55..a06c73b 100644 --- a/src/rok4/Pyramid.py +++ b/src/rok4/Pyramid.py @@ -6,20 +6,21 @@ - `Level` - Level of a pyramid """ -from typing import Dict, List, Tuple, Union, Iterator +import io import json -from json.decoder import JSONDecodeError import os import re -import numpy import zlib -import io +from json.decoder import JSONDecodeError +from typing import Dict, Iterator, List, Tuple, Union + import mapbox_vector_tile +import numpy from PIL import Image from rok4.Exceptions import * -from rok4.TileMatrixSet import 
TileMatrixSet, TileMatrix from rok4.Storage import * +from rok4.TileMatrixSet import TileMatrix, TileMatrixSet from rok4.Utils import * @@ -549,11 +550,8 @@ def serializable(self) -> Dict: Returns: Dict: descriptor structured object description """ - - serialization = { - "tile_matrix_set": self.__tms.name, - "format": self.__format - } + + serialization = {"tile_matrix_set": self.__tms.name, "format": self.__format} serialization["levels"] = [] sorted_levels = sorted(self.__levels.values(), key=lambda l: l.resolution, reverse=True) @@ -620,7 +618,7 @@ def storage_root(self) -> str: Returns: str: Pyramid's storage root """ - + return self.__storage["root"].split("@", 1)[ 0 ] # Suppression de l'éventuel hôte de spécification du cluster S3 @@ -670,7 +668,6 @@ def format(self) -> str: @property def tile_extension(self) -> str: - if self.__format in [ "TIFF_RAW_UINT8", "TIFF_LZW_UINT8", @@ -835,7 +832,7 @@ def get_level(self, level_id: str) -> "Level": Returns: The corresponding pyramid's level, None if not present """ - + return self.__levels.get(level_id, None) def get_levels(self, bottom_id: str = None, top_id: str = None) -> List[Level]: @@ -1019,7 +1016,6 @@ def get_slab_path_from_infos( else: return slab_path - def get_tile_data_binary(self, level: str, column: int, row: int) -> str: """Get a pyramid's tile as binary string @@ -1182,7 +1178,6 @@ def get_tile_data_raster(self, level: str, column: int, row: int) -> numpy.ndarr level_object = self.get_level(level) if self.__format == "TIFF_JPG_UINT8" or self.__format == "TIFF_JPG90_UINT8": - try: img = Image.open(io.BytesIO(binary_tile)) except Exception as e: @@ -1379,6 +1374,8 @@ def size(self) -> int: Returns: int: size of the pyramid """ - if not hasattr(self,"_Pyramid__size") : - self.__size = size_path(get_path_from_infos(self.__storage["type"], self.__storage["root"], self.__name)) + if not hasattr(self, "_Pyramid__size"): + self.__size = size_path( + get_path_from_infos(self.__storage["type"], self.__storage["root"], self.__name) + ) return self.__size diff --git a/src/rok4/Raster.py b/src/rok4/Raster.py index 640f4bf..271fc6a 100644 --- a/src/rok4/Raster.py +++ b/src/rok4/Raster.py @@ -10,9 +10,9 @@ import json import re from enum import Enum -from typing import Tuple, Dict +from typing import Dict, Tuple -from osgeo import ogr, gdal +from osgeo import gdal, ogr from rok4.Storage import exists, get_osgeo_path, put_data_str from rok4.Utils import ColorFormat, compute_bbox, compute_format diff --git a/src/rok4/Storage.py b/src/rok4/Storage.py index c8c685e..20dc874 100644 --- a/src/rok4/Storage.py +++ b/src/rok4/Storage.py @@ -30,17 +30,18 @@ To precise the cluster to use, bucket name should be bucket_name@s3.storage.fr or bucket_name@s4.storage.fr. 
If no host is defined (no @) in the bucket name, first S3 cluster is used """ +import hashlib +import os +import re +import tempfile +from enum import Enum +from shutil import copyfile +from typing import Dict, List, Tuple, Union + import boto3 import botocore.exceptions -import tempfile -import re -import os import rados -import hashlib import requests -from typing import Dict, List, Tuple, Union -from enum import Enum -from shutil import copyfile from osgeo import gdal gdal.UseExceptions() @@ -75,7 +76,7 @@ def __get_s3_client(bucket_name: str) -> Tuple[Dict[str, Union["boto3.client", s Returns: Tuple[Dict[str, Union['boto3.client',str]], str, str]: the S3 informations (client, host, key, secret) and the simple bucket name """ - + global __S3_CLIENTS, __S3_DEFAULT_CLIENT if not __S3_CLIENTS: @@ -134,7 +135,7 @@ def __get_s3_client(bucket_name: str) -> Tuple[Dict[str, Union["boto3.client", s def disconnect_s3_clients() -> None: """Clean S3 clients""" - + global __S3_CLIENTS, __S3_DEFAULT_CLIENT __S3_CLIENTS = {} __S3_DEFAULT_CLIENT = None @@ -363,16 +364,15 @@ def get_data_binary(path: str, range: Tuple[int, int] = None) -> str: raise StorageError("FILE", e) elif storage_type == StorageType.HTTP or storage_type == StorageType.HTTPS: - - if range is None : + if range is None: try: reponse = requests.get(f"{storage_type.value}{path}", stream=True) data = reponse.content - if reponse.status_code == 404 : + if reponse.status_code == 404: raise FileNotFoundError(f"{storage_type.value}{path}") except Exception as e: raise StorageError(storage_type.name, e) - else : + else: raise NotImplementedError else: @@ -471,7 +471,6 @@ def get_size(path: str) -> int: raise StorageError("FILE", e) elif storage_type == StorageType.HTTP or storage_type == StorageType.HTTPS: - try: # Le stream=True permet de ne télécharger que le header initialement reponse = requests.get(storage_type.value + path, stream=True).headers["content-length"] @@ -526,12 +525,11 @@ def exists(path: str) -> bool: return os.path.exists(path) elif storage_type == StorageType.HTTP or storage_type == StorageType.HTTPS: - try: response = requests.get(storage_type.value + path, stream=True) - if response.status_code == 200 : + if response.status_code == 200: return True - else : + else: return False except Exception as e: raise StorageError(storage_type.name, e) @@ -839,43 +837,52 @@ def copy(from_path: str, to_path: str, from_md5: str = None) -> None: f"CEPH and S3", f"Cannot copy CEPH object {from_path} to S3 object {to_path} : {e}" ) - elif (from_type == StorageType.HTTP or from_type == StorageType.HTTPS) and to_type == StorageType.FILE : - + elif ( + from_type == StorageType.HTTP or from_type == StorageType.HTTPS + ) and to_type == StorageType.FILE: try: - response = requests.get(from_type.value + from_path, stream = True) + response = requests.get(from_type.value + from_path, stream=True) with open(to_path, "wb") as f: - for chunk in response.iter_content(chunk_size=65536) : + for chunk in response.iter_content(chunk_size=65536): if chunk: f.write(chunk) except Exception as e: - raise StorageError(f"HTTP(S) and FILE", f"Cannot copy HTTP(S) object {from_path} to FILE object {to_path} : {e}") - - elif (from_type == StorageType.HTTP or from_type == StorageType.HTTPS) and to_type == StorageType.CEPH : + raise StorageError( + f"HTTP(S) and FILE", + f"Cannot copy HTTP(S) object {from_path} to FILE object {to_path} : {e}", + ) + elif ( + from_type == StorageType.HTTP or from_type == StorageType.HTTPS + ) and to_type == StorageType.CEPH: 
to_ioctx = __get_ceph_ioctx(to_tray) try: - response = requests.get(from_type.value + from_path, stream = True) + response = requests.get(from_type.value + from_path, stream=True) offset = 0 - for chunk in response.iter_content(chunk_size=65536) : + for chunk in response.iter_content(chunk_size=65536): if chunk: size = len(chunk) to_ioctx.write(to_base_name, chunk, offset) offset += size except Exception as e: - raise StorageError(f"HTTP(S) and CEPH", f"Cannot copy HTTP(S) object {from_path} to CEPH object {to_path} : {e}") - - elif (from_type == StorageType.HTTP or from_type == StorageType.HTTPS) and to_type == StorageType.S3 : + raise StorageError( + f"HTTP(S) and CEPH", + f"Cannot copy HTTP(S) object {from_path} to CEPH object {to_path} : {e}", + ) + elif ( + from_type == StorageType.HTTP or from_type == StorageType.HTTPS + ) and to_type == StorageType.S3: to_s3_client, to_bucket = __get_s3_client(to_tray) try: - response = requests.get(from_type.value + from_path, stream = True) - with tempfile.NamedTemporaryFile("w+b",delete=False) as f: + response = requests.get(from_type.value + from_path, stream=True) + with tempfile.NamedTemporaryFile("w+b", delete=False) as f: name_fich = f.name - for chunk in response.iter_content(chunk_size=65536) : + for chunk in response.iter_content(chunk_size=65536): if chunk: f.write(chunk) @@ -884,7 +891,10 @@ def copy(from_path: str, to_path: str, from_md5: str = None) -> None: os.remove(name_fich) except Exception as e: - raise StorageError(f"HTTP(S) and S3", f"Cannot copy HTTP(S) object {from_path} to S3 object {to_path} : {e}") + raise StorageError( + f"HTTP(S) and S3", + f"Cannot copy HTTP(S) object {from_path} to S3 object {to_path} : {e}", + ) else: raise StorageError( @@ -994,7 +1004,8 @@ def get_osgeo_path(path: str) -> str: else: raise NotImplementedError(f"Cannot get a GDAL/OGR compliant path from {path}") -def size_path(path: str) -> int : + +def size_path(path: str) -> int: """Return the size of the path given (or, for the CEPH, the sum of the size of each object of the .list) Args: @@ -1007,10 +1018,10 @@ def size_path(path: str) -> int : Returns: int: size of the path """ - storage_type, unprefixed_path, tray_name, base_name = get_infos_from_path(path) + storage_type, unprefixed_path, tray_name, base_name = get_infos_from_path(path) if storage_type == StorageType.FILE: - try : + try: total = 0 with os.scandir(unprefixed_path) as it: for entry in it: @@ -1025,24 +1036,23 @@ def size_path(path: str) -> int : elif storage_type == StorageType.S3: s3_client, bucket_name = __get_s3_client(tray_name) - try : - paginator = s3_client["client"].get_paginator('list_objects_v2') + try: + paginator = s3_client["client"].get_paginator("list_objects_v2") pages = paginator.paginate( Bucket=bucket_name, - Prefix=base_name+"/", + Prefix=base_name + "/", PaginationConfig={ - 'PageSize': 10000, - } + "PageSize": 10000, + }, ) total = 0 for page in pages: - for key in page['Contents']: - total += key['Size'] + for key in page["Contents"]: + total += key["Size"] except Exception as e: raise StorageError("S3", e) - elif storage_type == StorageType.CEPH: raise NotImplementedError else: diff --git a/src/rok4/TileMatrixSet.py b/src/rok4/TileMatrixSet.py index 472810c..3000492 100644 --- a/src/rok4/TileMatrixSet.py +++ b/src/rok4/TileMatrixSet.py @@ -9,15 +9,15 @@ - ROK4_TMS_DIRECTORY """ +import json +import os +from json.decoder import JSONDecodeError +from typing import Dict, List, Tuple + from rok4.Exceptions import * from rok4.Storage import get_data_str from 
rok4.Utils import * -from typing import Dict, List, Tuple -from json.decoder import JSONDecodeError -import json -import os - class TileMatrix: """A tile matrix is a tile matrix set's level. diff --git a/src/rok4/Utils.py b/src/rok4/Utils.py index da15aef..125e391 100644 --- a/src/rok4/Utils.py +++ b/src/rok4/Utils.py @@ -3,10 +3,10 @@ import os import re - -from typing import Dict, List, Tuple, Union -from osgeo import ogr, osr, gdal from enum import Enum +from typing import Dict, List, Tuple, Union + +from osgeo import gdal, ogr, osr ogr.UseExceptions() osr.UseExceptions() diff --git a/src/rok4/Vector.py b/src/rok4/Vector.py index 94e6def..46656c2 100644 --- a/src/rok4/Vector.py +++ b/src/rok4/Vector.py @@ -6,12 +6,14 @@ """ -from osgeo import ogr -from rok4.Storage import get_osgeo_path, copy -from rok4.Exceptions import * import os import tempfile +from osgeo import ogr + +from rok4.Exceptions import * +from rok4.Storage import copy, get_osgeo_path + # Enable GDAL/OGR exceptions ogr.UseExceptions() diff --git a/tests/test_Layer.py b/tests/test_Layer.py index 0a60d5c..a35f649 100644 --- a/tests/test_Layer.py +++ b/tests/test_Layer.py @@ -1,12 +1,12 @@ -from rok4.Layer import Layer -from rok4.Pyramid import PyramidType -from rok4.Exceptions import * +import os +from unittest import mock +from unittest.mock import * import pytest -import os -from unittest.mock import * -from unittest import mock +from rok4.Exceptions import * +from rok4.Layer import Layer +from rok4.Pyramid import PyramidType @mock.patch.dict(os.environ, {}, clear=True) diff --git a/tests/test_Pyramid.py b/tests/test_Pyramid.py index 6eebfd7..f5f1d60 100644 --- a/tests/test_Pyramid.py +++ b/tests/test_Pyramid.py @@ -1,13 +1,14 @@ +import os +from unittest import mock +from unittest.mock import * + +import pytest + +from rok4.Exceptions import * from rok4.Pyramid import * -from rok4.TileMatrixSet import TileMatrixSet from rok4.Storage import StorageType +from rok4.TileMatrixSet import TileMatrixSet from rok4.Utils import * -from rok4.Exceptions import * - -import pytest -import os -from unittest.mock import * -from unittest import mock @mock.patch("rok4.Pyramid.get_data_str", side_effect=StorageError("FILE", "Not found")) diff --git a/tests/test_Raster.py b/tests/test_Raster.py index de35de1..cc79846 100644 --- a/tests/test_Raster.py +++ b/tests/test_Raster.py @@ -6,17 +6,17 @@ """ import copy -import math import json +import math import random +from unittest import TestCase, mock +from unittest.mock import MagicMock, Mock, call, mock_open, patch + import pytest -from unittest import mock, TestCase -from unittest.mock import call, MagicMock, Mock, mock_open, patch from rok4.Raster import Raster, RasterSet from rok4.Utils import ColorFormat - # rok4.Raster.Raster class tests diff --git a/tests/test_Storage.py b/tests/test_Storage.py index 9779954..f7b665d 100644 --- a/tests/test_Storage.py +++ b/tests/test_Storage.py @@ -1,14 +1,13 @@ -from rok4.Storage import * -from rok4.Exceptions import * - -import pytest import os +from unittest import mock +from unittest.mock import * import botocore.exceptions +import pytest from rados import ObjectNotFound -from unittest import mock -from unittest.mock import * +from rok4.Exceptions import * +from rok4.Storage import * @mock.patch.dict(os.environ, {}, clear=True) @@ -21,6 +20,7 @@ def test_hash_file_ok(mock_file): except Exception as exc: assert False, f"FILE md5 sum raises an exception: {exc}" + @mock.patch.dict(os.environ, {}, clear=True) def test_get_infos_from_path(): assert 
(StorageType.S3, "toto/titi", "toto", "titi") == get_infos_from_path("s3://toto/titi") @@ -103,6 +103,7 @@ def test_file_read_ok(mock_file): except Exception as exc: assert False, f"FILE read raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -117,6 +118,7 @@ def test_s3_read_nok(mocked_s3_client): with pytest.raises(StorageError): data = get_data_str("s3://bucket/path/to/object") + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -159,8 +161,9 @@ def test_ceph_read_ok(mocked_rados_client): except Exception as exc: assert False, f"CEPH read raises an exception: {exc}" + @mock.patch.dict(os.environ, {}, clear=True) -@mock.patch("requests.get", side_effect={"status_code":404}) +@mock.patch("requests.get", side_effect={"status_code": 404}) def test_http_read_error(mock_http): with pytest.raises(StorageError): requests_instance = MagicMock() @@ -171,28 +174,31 @@ def test_http_read_error(mock_http): mock_http.assert_called_with("http://path/to/file.ext", stream=True) + @mock.patch.dict(os.environ, {}, clear=True) def test_http_read_range_error(): with pytest.raises(NotImplementedError): - data = get_data_binary("http://path/to/file.ext", (0,100)) + data = get_data_binary("http://path/to/file.ext", (0, 100)) + @mock.patch.dict(os.environ, {}, clear=True) @mock.patch("requests.get") def test_http_read_ok(mock_http): - try : + try: requests_instance = MagicMock() - requests_instance.content = b'data' + requests_instance.content = b"data" mock_http.return_value = requests_instance data = get_data_str("http://path/to/file.ext") mock_http.assert_called_with("http://path/to/file.ext", stream=True) - assert data == 'data' + assert data == "data" except Exception as exc: assert False, f"HTTP read raises an exception: {exc}" ############ put_data_str + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -225,6 +231,7 @@ def test_s3_write_ok(mocked_s3_client): except Exception as exc: assert False, f"S3 write raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -302,6 +309,7 @@ def test_copy_s3_file_nok(mock_hash_file, mock_makedirs, mocked_s3_client): copy("s3://bucket/source.ext", "file:///path/to/destination.ext", "toto") mock_makedirs.assert_called_once_with("/path/to", exist_ok=True) + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -320,6 +328,7 @@ def test_copy_file_s3_ok(mocked_s3_client): except Exception as exc: assert False, f"FILE -> S3 copy raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -338,6 +347,7 @@ def test_copy_s3_s3_ok(mocked_s3_client): except Exception as exc: assert False, f"S3 -> S3 copy raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -373,6 +383,7 @@ def test_copy_s3_s3_intercluster_nok(mocked_s3_client): with pytest.raises(StorageError): copy("s3://bucket@a/source.ext", "s3://bucket@c/destination.ext", "toto") + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -399,6 +410,7 @@ def 
test_copy_ceph_file_ok(mock_file, mock_makedirs, mocked_rados_client): except Exception as exc: assert False, f"CEPH -> FILE copy raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -423,6 +435,7 @@ def test_copy_file_ceph_ok(mock_file, mocked_rados_client): except Exception as exc: assert False, f"FILE -> CEPH copy raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -487,14 +500,14 @@ def test_copy_ceph_s3_ok(mock_file, mocked_s3_client, mocked_rados_client): except Exception as exc: assert False, f"CEPH -> S3 copy raises an exception: {exc}" + @mock.patch.dict(os.environ, {}, clear=True) -@mock.patch('requests.get') -@patch('builtins.open', new_callable=mock_open) +@mock.patch("requests.get") +@patch("builtins.open", new_callable=mock_open) def test_copy_http_file_ok(mock_open, mock_requests): try: - http_instance = MagicMock() - http_instance.iter_content.return_value = ["data","data2"] + http_instance.iter_content.return_value = ["data", "data2"] mock_requests.return_value = http_instance copy("http://path/to/source.ext", "file:///path/to/destination.ext") @@ -503,17 +516,20 @@ def test_copy_http_file_ok(mock_open, mock_requests): except Exception as exc: assert False, f"HTTP -> FILE copy raises an exception: {exc}" -@mock.patch.dict(os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, clear=True) -@mock.patch('rok4.Storage.rados.Rados') -@mock.patch('requests.get') + +@mock.patch.dict( + os.environ, + {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, + clear=True, +) +@mock.patch("rok4.Storage.rados.Rados") +@mock.patch("requests.get") def test_copy_http_ceph_ok(mock_requests, mocked_rados_client): try: - http_instance = MagicMock() - http_instance.iter_content.return_value = ["data","data2"] + http_instance.iter_content.return_value = ["data", "data2"] mock_requests.return_value = http_instance - disconnect_ceph_clients() ioctx_instance = MagicMock() ioctx_instance.write.return_value = None @@ -526,16 +542,20 @@ def test_copy_http_ceph_ok(mock_requests, mocked_rados_client): except Exception as exc: assert False, f"HTTP -> CEPH copy raises an exception: {exc}" -@mock.patch.dict(os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, clear=True) -@mock.patch('rok4.Storage.boto3.client') -@mock.patch('requests.get') -@patch('tempfile.NamedTemporaryFile', new_callable=mock_open) -@mock.patch('os.remove') + +@mock.patch.dict( + os.environ, + {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, + clear=True, +) +@mock.patch("rok4.Storage.boto3.client") +@mock.patch("requests.get") +@patch("tempfile.NamedTemporaryFile", new_callable=mock_open) +@mock.patch("os.remove") def test_copy_http_s3_ok(mock_remove, mock_tempfile, mock_requests, mocked_s3_client): try: - http_instance = MagicMock() - http_instance.iter_content.return_value = ["data","data2"] + http_instance.iter_content.return_value = ["data", "data2"] mock_requests.return_value = http_instance disconnect_s3_clients() @@ -546,7 +566,7 @@ def test_copy_http_s3_ok(mock_remove, mock_tempfile, mock_requests, mocked_s3_cl copy("http://path/to/source.ext", "s3://bucket/destination.ext") mock_requests.assert_called_once_with("http://path/to/source.ext", stream=True) - 
mock_tempfile.assert_called_once_with("w+b",delete=False) + mock_tempfile.assert_called_once_with("w+b", delete=False) except Exception as exc: assert False, f"HTTP -> CEPH copy raises an exception: {exc}" @@ -563,6 +583,7 @@ def test_link_hard_nok(): with pytest.raises(StorageError): link("ceph://pool1/source.ext", "ceph://pool2/destination.ext", True) + @mock.patch.dict(os.environ, {}, clear=True) @mock.patch("os.symlink", return_value=None) def test_link_file_ok(mock_link): @@ -582,6 +603,7 @@ def test_hlink_file_ok(mock_link): except Exception as exc: assert False, f"FILE hard link raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -619,6 +641,7 @@ def test_link_s3_ok(mocked_s3_client): except Exception as exc: assert False, f"S3 link raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -637,6 +660,7 @@ def test_link_s3_nok(mocked_s3_client): ############ get_size + @mock.patch.dict(os.environ, {}, clear=True) @mock.patch("os.stat") def test_size_file_ok(mock_stat): @@ -647,6 +671,7 @@ def test_size_file_ok(mock_stat): except Exception as exc: assert False, f"FILE size raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -667,6 +692,7 @@ def test_size_ceph_ok(mocked_rados_client): except Exception as exc: assert False, f"CEPH size raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -685,12 +711,12 @@ def test_size_s3_ok(mocked_s3_client): except Exception as exc: assert False, f"S3 size raises an exception: {exc}" + @mock.patch.dict(os.environ, {}, clear=True) -@mock.patch('requests.get') +@mock.patch("requests.get") def test_size_http_ok(mock_requests): - http_instance = MagicMock() - http_instance.headers = {"content-length":12} + http_instance.headers = {"content-length": 12} mock_requests.return_value = http_instance try: @@ -717,6 +743,7 @@ def test_exists_file_ok(mock_exists): except Exception as exc: assert False, f"FILE not exists raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -768,10 +795,10 @@ def test_exists_s3_ok(mocked_s3_client): except Exception as exc: assert False, f"CEPH not exists raises an exception: {exc}" + @mock.patch.dict(os.environ, {}, clear=True) -@mock.patch('requests.get') +@mock.patch("requests.get") def test_exists_http_ok(mock_requests): - http_instance = MagicMock() http_instance.status_code = 200 mock_requests.return_value = http_instance @@ -792,6 +819,7 @@ def test_exists_http_ok(mock_requests): ############ remove + @mock.patch.dict(os.environ, {}, clear=True) @mock.patch("os.remove") def test_remove_file_ok(mock_remove): @@ -807,6 +835,7 @@ def test_remove_file_ok(mock_remove): except Exception as exc: assert False, f"FILE deletion (not found) raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -832,6 +861,7 @@ def test_remove_ceph_ok(mocked_rados_client): except Exception as exc: assert False, f"CEPH deletion (not found) raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": 
"a,b"}, @@ -861,6 +891,7 @@ def test_get_osgeo_path_file_ok(): except Exception as exc: assert False, f"FILE osgeo path raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -881,6 +912,7 @@ def test_get_osgeo_path_nok(): with pytest.raises(NotImplementedError): get_osgeo_path("ceph://pool/data.ext") + ############ size_path def test_size_path_file_ok(): try: @@ -889,22 +921,31 @@ def test_size_path_file_ok(): except Exception as exc: assert False, f"FILE size of the path raises an exception: {exc}" + def test_size_file_nok(): - with pytest.raises(StorageError) : + with pytest.raises(StorageError): size = size_path("file://tests/fixtures/TIFF_PBF_M") -@mock.patch.dict(os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, clear=True) -def test_size_path_ceph_nok(): +@mock.patch.dict( + os.environ, + {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, + clear=True, +) +def test_size_path_ceph_nok(): with pytest.raises(NotImplementedError): size = size_path("ceph://pool/path") -@mock.patch.dict(os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, clear=True) -@mock.patch('rok4.Storage.boto3.client') -def test_size_path_s3_ok(mocked_s3_client): +@mock.patch.dict( + os.environ, + {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, + clear=True, +) +@mock.patch("rok4.Storage.boto3.client") +def test_size_path_s3_ok(mocked_s3_client): disconnect_s3_clients() - pages = [{"Contents" : [{"Size" : 10},{"Size" : 20}]}, {"Contents" : [{"Size" : 50}]}] + pages = [{"Contents": [{"Size": 10}, {"Size": 20}]}, {"Contents": [{"Size": 50}]}] paginator = MagicMock() paginator.paginate.return_value = pages client = MagicMock() @@ -916,4 +957,3 @@ def test_size_path_s3_ok(mocked_s3_client): assert size == 80 except Exception as exc: assert False, f"S3 size of the path raises an exception: {exc}" - diff --git a/tests/test_TileMatrixSet.py b/tests/test_TileMatrixSet.py index 4750f50..5cf6062 100644 --- a/tests/test_TileMatrixSet.py +++ b/tests/test_TileMatrixSet.py @@ -1,10 +1,11 @@ -from rok4.TileMatrixSet import TileMatrixSet -from rok4.Exceptions import * - -import pytest import os -from unittest.mock import * from unittest import mock +from unittest.mock import * + +import pytest + +from rok4.Exceptions import * +from rok4.TileMatrixSet import TileMatrixSet @mock.patch.dict(os.environ, {}, clear=True) diff --git a/tests/test_Utils.py b/tests/test_Utils.py index cdba571..0f5d67d 100644 --- a/tests/test_Utils.py +++ b/tests/test_Utils.py @@ -1,14 +1,14 @@ -from rok4.Utils import * -from rok4.Exceptions import * +import math +import os +import random +from unittest import mock +from unittest.mock import * import pytest -import os from osgeo import gdal, osr -import math -import random -from unittest.mock import * -from unittest import mock +from rok4.Exceptions import * +from rok4.Utils import * def test_srs_to_spatialreference_ignf_ok(): diff --git a/tests/test_Vector.py b/tests/test_Vector.py index 221f5e4..3fe0a1c 100644 --- a/tests/test_Vector.py +++ b/tests/test_Vector.py @@ -1,12 +1,13 @@ #!/usr/bin/env python3 -from rok4.Vector import * -from rok4.Exceptions import * -from rok4.Storage import disconnect_ceph_clients - -import pytest import os -from unittest.mock import * from unittest import mock +from unittest.mock import * + +import pytest + 
+from rok4.Exceptions import * +from rok4.Storage import disconnect_ceph_clients +from rok4.Vector import * @mock.patch.dict(os.environ, {}, clear=True)
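
The import-ordering and formatting changes in this patch come from the tooling it configures. A minimal sketch of reproducing them locally, assuming the project installs with an editable pip install and using only the paths and options that appear in this patch (dev extra from pyproject.toml, src/ and tests/ trees, isort profile "black"):

    # install the dev tooling declared in pyproject.toml (black, isort >= 5.12.0, pre-commit)
    pip install -e ".[dev]"

    # run every configured hook (black, isort --profile black, pyupgrade --py38-plus) on the whole tree
    pre-commit run --all-files

    # or invoke the two formatters directly
    isort --profile black --filter-files src tests
    black src tests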