
Commit

Merge pull request #48 from Guts/compliance/rename-files-to-comply-pep8
Rename the Python files to comply with the language's naming conventions (PEP 8)
Dolite authored Jul 18, 2023
2 parents bc67460 + d2f4985 commit a053513
Showing 11 changed files with 33 additions and 24 deletions.
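The rename changes only module paths, not class names: imports move to snake_case modules such as rok4.pyramid and rok4.tile_matrix_set. A minimal sketch of the resulting import style follows; the pre-rename CamelCase module names are an assumption, not shown in this diff.

# Hypothetical pre-rename imports (assumed CamelCase module files):
# from rok4.Pyramid import Pyramid
# from rok4.TileMatrixSet import TileMatrixSet

# Post-rename imports, as they appear throughout this diff:
from rok4.pyramid import Pyramid
from rok4.tile_matrix_set import TileMatrixSet

# Usage is unchanged by the rename:
pyramid = Pyramid.from_descriptor("s3://bucket_name/path/to/descriptor.json")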
2 changes: 1 addition & 1 deletion src/rok4/layer.py
@@ -12,7 +12,7 @@
import re

from rok4.exceptions import *
from rok4.pyramid import Pyramid, PyramidType
from rok4.pyramid import Pyramid
from rok4.tile_matrix_set import TileMatrixSet
from rok4.storage import *
from rok4.utils import *
20 changes: 11 additions & 9 deletions src/rok4/pyramid.py
@@ -21,11 +21,8 @@
from rok4.tile_matrix_set import TileMatrixSet, TileMatrix
from rok4.storage import *
from rok4.utils import *


from rok4.enums import PyramidType, SlabType, StorageType


ROK4_IMAGE_HEADER_SIZE = 2048
"""Slab's header size, 2048 bytes"""

@@ -609,7 +606,7 @@ def storage_root(self) -> str:
Returns:
str: Pyramid's storage root
"""

return self.__storage["root"].split("@", 1)[
0
]  # Strip the optional host that specifies the S3 cluster
@@ -735,7 +732,7 @@ def list_generator(self) -> Iterator[Tuple[Tuple[SlabType, str, int, int], Dict]
S3 stored descriptor
from rok4.pyramid import Pyramid
try:
pyramid = Pyramid.from_descriptor("s3://bucket_name/path/to/descriptor.json")
@@ -759,6 +756,7 @@ def list_generator(self) -> Iterator[Tuple[Tuple[SlabType, str, int, int], Dict]
'slab': 'DATA_18_5424_7526'
}
)
Raises:
StorageError: Unhandled pyramid storage to copy list
MissingEnvironmentError: Missing object storage informations
@@ -826,7 +824,7 @@ def get_level(self, level_id: str) -> "Level":
Returns:
The corresponding pyramid's level, None if not present
"""

return self.__levels.get(level_id, None)

def get_levels(self, bottom_id: str = None, top_id: str = None) -> List[Level]:
@@ -1010,7 +1008,6 @@ def get_slab_path_from_infos(
else:
return slab_path


def get_tile_data_binary(self, level: str, column: int, row: int) -> str:
"""Get a pyramid's tile as binary string
@@ -1256,6 +1253,7 @@ def get_tile_data_vector(self, level: str, column: int, row: int) -> Dict:
S3 stored vector pyramid, to print a tile as GeoJSON
from rok4.pyramid import Pyramid
import json
try:
@@ -1370,6 +1368,10 @@ def size(self) -> int:
Returns:
int: size of the pyramid
"""
if not hasattr(self,"_Pyramid__size") :
self.__size = size_path(get_path_from_infos(self.__storage["type"], self.__storage["root"], self.__name))

if not hasattr(self, "_Pyramid__size"):
self.__size = size_path(
get_path_from_infos(self.__storage["type"], self.__storage["root"], self.__name)
)

return self.__size
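The reformatted size property above caches its result on first access: the private attribute is name-mangled to _Pyramid__size, which is why the code checks hasattr(self, "_Pyramid__size") before computing. A minimal standalone sketch of that pattern, with a hypothetical _compute_size() standing in for size_path(get_path_from_infos(...)):

class Pyramid:
    """Stripped-down illustration of the lazy caching used by Pyramid.size."""

    def _compute_size(self) -> int:
        # Stand-in for size_path(get_path_from_infos(...)) in the real class.
        return 42

    @property
    def size(self) -> int:
        # self.__size is name-mangled to _Pyramid__size, hence the hasattr check.
        if not hasattr(self, "_Pyramid__size"):
            self.__size = self._compute_size()
        return self.__size

p = Pyramid()
assert p.size == 42  # first access computes and caches the value
assert p.size == 42  # later accesses reuse the cached attribute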
2 changes: 1 addition & 1 deletion src/rok4/raster.py
@@ -9,7 +9,7 @@
import copy
import json
import re
from enum import Enum

from typing import Dict, Tuple

from osgeo import gdal, ogr
12 changes: 8 additions & 4 deletions src/rok4/storage.py
@@ -39,7 +39,6 @@
import hashlib
import requests
from typing import Dict, List, Tuple, Union
from enum import Enum
from shutil import copyfile
from osgeo import gdal

@@ -68,7 +67,7 @@ def __get_s3_client(bucket_name: str) -> Tuple[Dict[str, Union["boto3.client", s
Returns:
Tuple[Dict[str, Union['boto3.client',str]], str, str]: the S3 informations (client, host, key, secret) and the simple bucket name
"""

global __S3_CLIENTS, __S3_DEFAULT_CLIENT

if not __S3_CLIENTS:
@@ -127,7 +126,7 @@ def __get_s3_client(bucket_name: str) -> Tuple[Dict[str, Union["boto3.client", s

def disconnect_s3_clients() -> None:
"""Clean S3 clients"""

global __S3_CLIENTS, __S3_DEFAULT_CLIENT
__S3_CLIENTS = {}
__S3_DEFAULT_CLIENT = None
@@ -838,10 +837,12 @@ def copy(from_path: str, to_path: str, from_md5: str = None) -> None:
response = requests.get(from_type.value + from_path, stream = True)
with open(to_path, "wb") as f:
for chunk in response.iter_content(chunk_size=65536) :

if chunk:
f.write(chunk)

except Exception as e:

raise StorageError(f"HTTP(S) and FILE", f"Cannot copy HTTP(S) object {from_path} to FILE object {to_path} : {e}")

elif (from_type == StorageType.HTTP or from_type == StorageType.HTTPS) and to_type == StorageType.CEPH :
@@ -858,6 +859,7 @@ def copy(from_path: str, to_path: str, from_md5: str = None) -> None:
offset += size

except Exception as e:

raise StorageError(f"HTTP(S) and CEPH", f"Cannot copy HTTP(S) object {from_path} to CEPH object {to_path} : {e}")

elif (from_type == StorageType.HTTP or from_type == StorageType.HTTPS) and to_type == StorageType.S3 :
@@ -869,6 +871,7 @@ def copy(from_path: str, to_path: str, from_md5: str = None) -> None:
with tempfile.NamedTemporaryFile("w+b",delete=False) as f:
name_fich = f.name
for chunk in response.iter_content(chunk_size=65536) :

if chunk:
f.write(chunk)

@@ -879,6 +882,7 @@ def copy(from_path: str, to_path: str, from_md5: str = None) -> None:
except Exception as e:
raise StorageError(f"HTTP(S) and S3", f"Cannot copy HTTP(S) object {from_path} to S3 object {to_path} : {e}")


else:
raise StorageError(
f"{from_type.name} and {to_type.name}",
@@ -1022,6 +1026,7 @@ def size_path(path: str) -> int :
elif storage_type == StorageType.S3:
s3_client, bucket_name = __get_s3_client(tray_name)


try :
paginator = s3_client["client"].get_paginator('list_objects_v2')
pages = paginator.paginate(
@@ -1039,7 +1044,6 @@ def size_path(path: str) -> int :
except Exception as e:
raise StorageError("S3", e)


elif storage_type == StorageType.CEPH:
raise NotImplementedError
else:
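The HTTP(S)-to-FILE branch of copy() shown above streams the source in 64 KiB chunks instead of loading the whole object into memory. A minimal sketch of that streaming pattern, with hypothetical paths and without the StorageError wrapping used in rok4.storage:

import requests

def download_to_file(url: str, destination: str) -> None:
    """Stream an HTTP(S) resource to a local file in 64 KiB chunks."""
    response = requests.get(url, stream=True)
    response.raise_for_status()
    with open(destination, "wb") as f:
        for chunk in response.iter_content(chunk_size=65536):
            if chunk:  # skip keep-alive chunks
                f.write(chunk)

# Hypothetical usage; the real copy() raises StorageError on failure:
# download_to_file("http://path/to/source.ext", "/tmp/destination.ext")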
1 change: 0 additions & 1 deletion src/rok4/tile_matrix_set.py
@@ -18,7 +18,6 @@
from rok4.storage import get_data_str
from rok4.utils import *


class TileMatrix:
"""A tile matrix is a tile matrix set's level.
1 change: 0 additions & 1 deletion src/rok4/utils.py
@@ -4,7 +4,6 @@
import os
import re

from enum import Enum
from typing import Dict, List, Tuple, Union

from osgeo import gdal, ogr, osr
1 change: 0 additions & 1 deletion src/rok4/vector.py
@@ -16,7 +16,6 @@
from rok4.storage import get_osgeo_path, copy
from rok4.exceptions import *


class Vector:
"""A data vector
1 change: 0 additions & 1 deletion tests/test_layer.py
@@ -8,7 +8,6 @@
from rok4.enums import PyramidType
from rok4.exceptions import *


@mock.patch.dict(os.environ, {}, clear=True)
@mock.patch(
"rok4.layer.get_data_str",
14 changes: 11 additions & 3 deletions tests/test_storage.py
@@ -103,6 +103,7 @@ def test_file_read_ok(mock_file):
except Exception as exc:
assert False, f"FILE read raises an exception: {exc}"


@mock.patch.dict(
os.environ,
{"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"},
@@ -179,14 +180,15 @@ def test_http_read_range_error():
@mock.patch.dict(os.environ, {}, clear=True)
@mock.patch("requests.get")
def test_http_read_ok(mock_http):

try :
requests_instance = MagicMock()
requests_instance.content = b'data'
mock_http.return_value = requests_instance

data = get_data_str("http://path/to/file.ext")
mock_http.assert_called_with("http://path/to/file.ext", stream=True)
assert data == 'data'
assert data == "data"
except Exception as exc:
assert False, f"HTTP read raises an exception: {exc}"

@@ -373,6 +375,7 @@ def test_copy_s3_s3_intercluster_nok(mocked_s3_client):
with pytest.raises(StorageError):
copy("s3://bucket@a/source.ext", "s3://bucket@c/destination.ext", "toto")


@mock.patch.dict(
os.environ,
{"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"},
@@ -423,6 +426,7 @@ def test_copy_file_ceph_ok(mock_file, mocked_rados_client):
except Exception as exc:
assert False, f"FILE -> CEPH copy raises an exception: {exc}"


@mock.patch.dict(
os.environ,
{"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"},
@@ -513,7 +517,6 @@ def test_copy_http_ceph_ok(mock_requests, mocked_rados_client):
http_instance.iter_content.return_value = ["data","data2"]
mock_requests.return_value = http_instance


disconnect_ceph_clients()
ioctx_instance = MagicMock()
ioctx_instance.write.return_value = None
@@ -546,7 +549,7 @@ def test_copy_http_s3_ok(mock_remove, mock_tempfile, mock_requests, mocked_s3_cl

copy("http://path/to/source.ext", "s3://bucket/destination.ext")
mock_requests.assert_called_once_with("http://path/to/source.ext", stream=True)
mock_tempfile.assert_called_once_with("w+b",delete=False)
mock_tempfile.assert_called_once_with("w+b", delete=False)
except Exception as exc:
assert False, f"HTTP -> CEPH copy raises an exception: {exc}"

@@ -563,6 +566,7 @@ def test_link_hard_nok():
with pytest.raises(StorageError):
link("ceph://pool1/source.ext", "ceph://pool2/destination.ext", True)


@mock.patch.dict(os.environ, {}, clear=True)
@mock.patch("os.symlink", return_value=None)
def test_link_file_ok(mock_link):
@@ -619,6 +623,7 @@ def test_link_s3_ok(mocked_s3_client):
except Exception as exc:
assert False, f"S3 link raises an exception: {exc}"


@mock.patch.dict(
os.environ,
{"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"},
@@ -792,6 +797,7 @@ def test_exists_http_ok(mock_requests):

############ remove


@mock.patch.dict(os.environ, {}, clear=True)
@mock.patch("os.remove")
def test_remove_file_ok(mock_remove):
@@ -807,6 +813,7 @@ def test_remove_file_ok(mock_remove):
except Exception as exc:
assert False, f"FILE deletion (not found) raises an exception: {exc}"


@mock.patch.dict(
os.environ,
{"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"},
@@ -889,6 +896,7 @@ def test_size_path_file_ok():
except Exception as exc:
assert False, f"FILE size of the path raises an exception: {exc}"


def test_size_file_nok():
with pytest.raises(StorageError) :
size = size_path("file://tests/fixtures/TIFF_PBF_M")
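Tests in tests/test_storage.py such as test_http_read_ok patch requests.get and assert on both the decoded payload and the call arguments. A self-contained sketch of that mocking pattern, using a hypothetical read_url() helper in place of rok4.storage.get_data_str:

from unittest import mock
from unittest.mock import MagicMock

import requests

def read_url(url: str) -> str:
    # Hypothetical helper mirroring what get_data_str does for HTTP storage.
    response = requests.get(url, stream=True)
    return response.content.decode("utf-8")

@mock.patch("requests.get")
def test_read_url_ok(mock_http):
    requests_instance = MagicMock()
    requests_instance.content = b"data"
    mock_http.return_value = requests_instance

    assert read_url("http://path/to/file.ext") == "data"
    mock_http.assert_called_with("http://path/to/file.ext", stream=True)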
1 change: 1 addition & 0 deletions tests/test_tile_matrix_set.py
@@ -1,3 +1,4 @@

import pytest
import os
from unittest.mock import *
Expand Down
2 changes: 0 additions & 2 deletions tests/test_utils.py
@@ -75,7 +75,6 @@ def test_reproject_point_ok():

# Tests for the rok4.utils.compute_bbox function.


def test_compute_bbox_epsg_3857_ok():
try:
mocked_datasource = MagicMock(gdal.Dataset)
@@ -216,7 +215,6 @@ def test_compute_bbox_no_srs_ok():

# Tests for the rok4.utils.compute_format function.


@mock.patch("rok4.utils.gdal.Info")
@mock.patch("rok4.utils.gdal.GetColorInterpretationName", return_value="Palette")
@mock.patch("rok4.utils.gdal.GetDataTypeSize", return_value=8)
