Skip to content

Commit

Permalink
Merge pull request #207 from SciCatProject/more-ruff-rules
Browse files Browse the repository at this point in the history
More ruff rules
  • Loading branch information
jl-wynen authored Apr 26, 2024
2 parents 2daff26 + 5e47fa4 commit 3a44e6c
Show file tree
Hide file tree
Showing 26 changed files with 126 additions and 112 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ repos:
args: ["--drop-empty-cells",
"--extra-keys 'metadata.language_info.version cell.metadata.jp-MarkdownHeadingCollapsed cell.metadata.pycharm'"]
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.4.0
rev: v0.4.2
hooks:
- id: ruff-format
types_or: [ python, pyi ]
Expand Down
2 changes: 1 addition & 1 deletion docs/user-guide/testing.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -482,7 +482,7 @@
" if real_client is None:\n",
" pytest.skip(\"Backend tests disabled\")\n",
" # or do something else\n",
" \n",
"\n",
" # do the actual tests"
]
},
Expand Down
13 changes: 7 additions & 6 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -94,14 +94,14 @@ extend-include = ["*.ipynb"]
extend-exclude = [".*", "__pycache__", "build", "dist", "venv"]

[tool.ruff.lint]
select = ["B", "D", "E", "F", "G", "I", "S", "T20", "UP", "PGH", "FBT003", "RUF"]
select = ["B", "C4", "D", "DTZ", "E", "F", "G", "I", "FBT003", "PERF", "PGH", "PT", "PYI", "RUF", "S", "T20", "W"]
ignore = [
"B905", # `zip()` without an explicit `strict=` parameter
"S324", # insecure hash function; we don't use hashing for security
"E741", "E742", "E743", # do not use names ‘l’, ‘O’, or ‘I’; they are not a problem with a proper font
"D105", # most magic methods don't need docstrings as their purpose is always the same
"E741", "E742", "E743", # do not use names ‘l’, ‘O’, or ‘I’; they are not a problem with a proper font
"UP038", # does not seem to work and leads to slower code
"E111", "E114", "E117", "D206", "D300", # conflict with ruff format
"D105",
# Conflict with ruff format, see
# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
"COM812", "COM819", "D206", "D300", "E111", "E114", "E117", "ISC001", "ISC002", "Q000", "Q001", "Q002", "Q003", "W191",
]
fixable = ["I001"]
isort.known-first-party = ["scitacean"]
Expand All @@ -111,6 +111,7 @@ pydocstyle.convention = "numpy"
"tests/*" = [
"S101", # asserts are fine in tests
"D10", # no docstrings required in tests
"S324", # insecure hash function; we don't use hashing for security
]
"docs/*" = [
"D", "E402", "F811", "F841", "RUF015", "S101", "T201",
Expand Down
6 changes: 3 additions & 3 deletions src/scitacean/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -432,7 +432,7 @@ def _get_or_add_orig_datablock(self, key: int | str | PID) -> OrigDatablock:

def make_upload_model(self) -> UploadDerivedDataset | UploadRawDataset:
"""Construct a SciCat upload model from self."""
model: type[UploadRawDataset] | type[UploadDerivedDataset] = (
model: type[UploadRawDataset | UploadDerivedDataset] = (
UploadRawDataset if self.type == DatasetType.RAW else UploadDerivedDataset
)
# Datablocks are not included here because they are handled separately
Expand Down Expand Up @@ -517,8 +517,8 @@ def keys(self) -> Iterable[str]:
"""
from itertools import chain

all_fields = set(field.name for field in self.fields())
my_fields = set(field.name for field in self.fields(dataset_type=self.type))
all_fields = {field.name for field in self.fields()}
my_fields = {field.name for field in self.fields(dataset_type=self.type)}
other_fields = all_fields - my_fields
invalid_fields = (
f_name for f_name in other_fields if getattr(self, f_name) is not None
Expand Down
14 changes: 7 additions & 7 deletions src/scitacean/file.py
Original file line number Diff line number Diff line change
Expand Up @@ -379,13 +379,13 @@ def uploaded(
"""
if remote_creation_time is None:
remote_creation_time = datetime.now().astimezone(timezone.utc)
args = dict(
remote_path=RemotePath(remote_path) if remote_path is not None else None,
remote_gid=remote_gid,
remote_uid=remote_uid,
remote_perm=remote_perm,
_remote_creation_time=remote_creation_time,
)
args = {
"remote_path": RemotePath(remote_path) if remote_path is not None else None,
"remote_gid": remote_gid,
"remote_uid": remote_uid,
"remote_perm": remote_perm,
"_remote_creation_time": remote_creation_time,
}
return dataclasses.replace(
self,
_remote_size=remote_size if remote_size is not None else self.size,
Expand Down
2 changes: 1 addition & 1 deletion src/scitacean/testing/backend/fixtures.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ def client(request, scicat_backend) -> Client | FakeClient:


@pytest.fixture()
def require_scicat_backend(request, scicat_backend) -> None:
def require_scicat_backend(request, scicat_backend) -> None: # noqa: PT004
"""Fixture to declare that a test needs a local scicat backend.
Like :func:`scitacean.testing.backend.scicat_backend`
Expand Down
2 changes: 1 addition & 1 deletion src/scitacean/testing/sftp/_sftp.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ def _seed_files() -> Iterable[tuple[str, str]]:
def local_access() -> SFTPAccess:
config = _docker_compose_file()
service = config["services"]["scitacean-test-sftp-server"]
env = {k: v for k, v in map(lambda s: s.split("="), service["environment"])}
env = dict(map(lambda s: s.split("="), service["environment"]))
return SFTPAccess(
host="localhost",
port=service["ports"][0].split(":")[0],
Expand Down
2 changes: 1 addition & 1 deletion src/scitacean/testing/sftp/fixtures.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def sftp_data_dir(sftp_base_dir: Path | None) -> Path | None:


@pytest.fixture()
def require_sftp_fileserver(request, sftp_fileserver) -> None:
def require_sftp_fileserver(request, sftp_fileserver) -> None: # noqa: PT004
"""Fixture to declare that a test needs a local SFTP server.
Like :func:`scitacean.testing.sftp.sftp_fileserver`
Expand Down
2 changes: 1 addition & 1 deletion src/scitacean/testing/transfer.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def download_file(self, *, remote: RemotePath, local: Path) -> None:

def download_files(self, *, remote: list[RemotePath], local: list[Path]) -> None:
"""Download multiple files."""
for r, l in zip(remote, local):
for r, l in zip(remote, local, strict=True):
self.download_file(remote=r, local=l)


Expand Down
2 changes: 1 addition & 1 deletion src/scitacean/transfer/link.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ class LinkDownloadConnection:

def download_files(self, *, remote: list[RemotePath], local: list[Path]) -> None:
"""Download files from the given remote path."""
for r, l in zip(remote, local):
for r, l in zip(remote, local, strict=True):
self.download_file(remote=r, local=l)

def download_file(self, *, remote: RemotePath, local: Path) -> None:
Expand Down
2 changes: 1 addition & 1 deletion src/scitacean/transfer/sftp.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def __init__(self, *, sftp_client: SFTPClient, host: str) -> None:

def download_files(self, *, remote: list[RemotePath], local: list[Path]) -> None:
"""Download files from the given remote path."""
for r, l in zip(remote, local):
for r, l in zip(remote, local, strict=True):
self.download_file(remote=r, local=l)

def download_file(self, *, remote: RemotePath, local: Path) -> None:
Expand Down
6 changes: 3 additions & 3 deletions tests/client/attachment_client_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,14 +136,14 @@ def test_get_attachments_for_dataset_no_attachments(scicat_client):
assert attachments == []


@pytest.mark.parametrize("key", ("raw", "derived"))
@pytest.mark.parametrize("key", ["raw", "derived"])
def test_get_dataset_does_not_initialise_attachments(client, key):
dset = INITIAL_DATASETS["derived"]
downloaded = client.get_dataset(dset.pid)
assert downloaded.attachments is None


@pytest.mark.parametrize("key", ("raw", "derived"))
@pytest.mark.parametrize("key", ["raw", "derived"])
def test_download_attachments_for_dataset(client, key):
dset = INITIAL_DATASETS[key]
downloaded = client.get_dataset(dset.pid)
Expand All @@ -155,7 +155,7 @@ def test_download_attachments_for_dataset(client, key):
assert with_attachments.attachments == expected


@pytest.mark.parametrize("key", ("raw", "derived"))
@pytest.mark.parametrize("key", ["raw", "derived"])
def test_get_dataset_with_attachments(client, key):
dset = INITIAL_DATASETS[key]
downloaded = client.get_dataset(dset.pid, attachments=True)
Expand Down
4 changes: 2 additions & 2 deletions tests/client/client_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,9 +66,9 @@ def test_connection_error_does_not_contain_token():
)
try:
client.get_dataset("does not exist")
assert False, "There must be an exception" # noqa: B011
pytest.fail("There must be an exception")
except Exception as exc:
assert "the token/which_must-be.kept secret" not in str(exc)
assert "the token/which_must-be.kept secret" not in str(exc) # noqa: PT017
for arg in exc.args:
assert "the token/which_must-be.kept secret" not in str(arg)

Expand Down
2 changes: 1 addition & 1 deletion tests/client/datablock_client_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ def orig_datablock(scicat_access):
)


@pytest.mark.parametrize("key", ("raw", "derived"))
@pytest.mark.parametrize("key", ["raw", "derived"])
def test_get_orig_datablock(scicat_client, key):
dblock = INITIAL_ORIG_DATABLOCKS[key][0]
downloaded = scicat_client.get_orig_datablocks(dblock.datasetId)
Expand Down
12 changes: 8 additions & 4 deletions tests/client/dataset_client_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def derived_dataset(scicat_access):
)


@pytest.mark.parametrize("key", ("raw", "derived"))
@pytest.mark.parametrize("key", ["raw", "derived"])
def test_get_dataset_model(scicat_client, key):
dset = INITIAL_DATASETS[key]
downloaded = scicat_client.get_dataset_model(dset.pid)
Expand Down Expand Up @@ -68,7 +68,7 @@ def test_create_dataset_model(scicat_client, derived_dataset):
def test_validate_dataset_model(real_client, require_scicat_backend, derived_dataset):
real_client.scicat.validate_dataset_model(derived_dataset)
derived_dataset.contactEmail = "NotAnEmail"
with pytest.raises(ValueError):
with pytest.raises(ValueError, match="validation in SciCat"):
real_client.scicat.validate_dataset_model(derived_dataset)


Expand All @@ -83,7 +83,9 @@ def test_get_dataset(client):
assert downloaded.meta["temperature"] == dset.scientificMetadata["temperature"]
assert downloaded.meta["data_type"] == dset.scientificMetadata["data_type"]

for dset_file, expected_file in zip(downloaded.files, dblock.dataFileList):
for dset_file, expected_file in zip(
downloaded.files, dblock.dataFileList, strict=True
):
assert dset_file.local_path is None
assert dset_file.size == expected_file.size
assert dset_file.creation_time == expected_file.time
Expand All @@ -100,7 +102,9 @@ def test_can_get_public_dataset_without_login(require_scicat_backend, scicat_acc
assert downloaded.creation_time == dset.creationTime
assert downloaded.access_groups == dset.accessGroups

for dset_file, expected_file in zip(downloaded.files, dblock.dataFileList):
for dset_file, expected_file in zip(
downloaded.files, dblock.dataFileList, strict=True
):
assert dset_file.local_path is None
assert dset_file.size == expected_file.size
assert dset_file.creation_time == expected_file.time
Expand Down
12 changes: 6 additions & 6 deletions tests/common/files.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,9 @@ def make_file(
# and avoids potential difficulties of querying the file system.
creation_time = datetime.now().astimezone(timezone.utc)

return dict(
path=path,
creation_time=creation_time,
checksum=checksum_digest,
size=len(contents),
)
return {
"path": path,
"creation_time": creation_time,
"checksum": checksum_digest,
"size": len(contents),
}
19 changes: 9 additions & 10 deletions tests/dataset_fields_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,15 +36,15 @@ def test_init_dataset_with_only_type():


@pytest.mark.parametrize(
"typ", ("raw", "derived", DatasetType.RAW, DatasetType.DERIVED)
"typ", ["raw", "derived", DatasetType.RAW, DatasetType.DERIVED]
)
def test_init_dataset_accepted_types(typ):
dset = Dataset(type=typ)
assert dset.type == typ


def test_init_dataset_raises_for_bad_type():
with pytest.raises(ValueError):
with pytest.raises(ValueError, match="DatasetType"):
Dataset(type="bad-type") # type: ignore[arg-type]


Expand Down Expand Up @@ -420,8 +420,7 @@ def test_make_raw_model_raises_if_derived_field_set(field, data):
)
val = data.draw(st.from_type(field.type))
assume(val is not None)
setattr(dset, field.name, val)
with pytest.raises(ValueError):
with pytest.raises(pydantic.ValidationError):
dset.make_upload_model()


Expand Down Expand Up @@ -451,11 +450,11 @@ def test_make_derived_model_raises_if_raw_field_set(field, data):
val = data.draw(st.from_type(field.type))
assume(val is not None)
setattr(dset, field.name, val)
with pytest.raises(ValueError):
with pytest.raises(pydantic.ValidationError):
dset.make_upload_model()


@pytest.mark.parametrize("field", ("contact_email", "owner_email"))
@pytest.mark.parametrize("field", ["contact_email", "owner_email"])
def test_email_validation(field):
dset = Dataset(
type="raw",
Expand All @@ -473,11 +472,11 @@ def test_email_validation(field):

@pytest.mark.parametrize(
"good_orcid",
(
[
"https://orcid.org/0000-0002-3761-3201",
"https://orcid.org/0000-0001-2345-6789",
"https://orcid.org/0000-0003-2818-0368",
),
],
)
def test_orcid_validation_valid(good_orcid):
dset = Dataset(
Expand All @@ -496,12 +495,12 @@ def test_orcid_validation_valid(good_orcid):

@pytest.mark.parametrize(
"bad_orcid",
(
[
"0000-0002-3761-3201",
"https://not-orcid.eu/0000-0002-3761-3201",
"https://orcid.org/0010-0002-3765-3201",
"https://orcid.org/0000-0002-3761-320X",
),
],
)
def test_orcid_validation_missing_url(bad_orcid):
dset = Dataset(
Expand Down
Loading

0 comments on commit 3a44e6c

Please sign in to comment.