Update pre-commit hooks (#104)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
andersy005 and pre-commit-ci[bot] authored Feb 2, 2022
1 parent f5b1fef commit 6bcc105
Showing 7 changed files with 46 additions and 34 deletions.
11 changes: 10 additions & 1 deletion .pre-commit-config.yaml
@@ -1,4 +1,3 @@
exclude: schema/generic_schema.yaml
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.1.0
@@ -9,11 +8,21 @@ repos:
- id: check-json
- id: check-yaml
- id: double-quote-string-fixer
- id: debug-statements
- id: mixed-line-ending

- repo: https://github.com/asottile/pyupgrade
rev: v2.31.0
hooks:
- id: pyupgrade
args:
- '--py37-plus'

- repo: https://github.com/psf/black
rev: 22.1.0
hooks:
- id: black-jupyter
- id: black

- repo: https://github.com/keewis/blackdoc
rev: v0.3.4
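For reference, the bumped hook revisions above are what pre-commit.ci produces with its autoupdate run; a minimal sketch of reproducing that locally, assuming the pre-commit package is installed in the active environment:

    # Minimal sketch, assuming `pre-commit` is installed in the active environment.
    import subprocess

    # Bump every `rev:` pin in .pre-commit-config.yaml to the latest tag,
    # then run all hooks (pyupgrade, black, blackdoc, ...) across the repository.
    subprocess.run(['pre-commit', 'autoupdate'], check=True)
    subprocess.run(['pre-commit', 'run', '--all-files'], check=True)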
1 change: 0 additions & 1 deletion docs/source/conf.py
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
import datetime

import ecgtools
56 changes: 27 additions & 29 deletions ecgtools/parsers/cmip.py
@@ -11,35 +11,33 @@ def parse_cmip6(file):
"""Parser for CMIP6"""
keys = sorted(
list(
set(
[
'activity_id',
'branch_method',
'branch_time_in_child',
'branch_time_in_parent',
'experiment',
'experiment_id',
'frequency',
'grid',
'grid_label',
'institution_id',
'nominal_resolution',
'parent_activity_id',
'parent_experiment_id',
'parent_source_id',
'parent_time_units',
'parent_variant_label',
'realm',
'product',
'source_id',
'source_type',
'sub_experiment',
'sub_experiment_id',
'table_id',
'variable_id',
'variant_label',
]
)
{
'activity_id',
'branch_method',
'branch_time_in_child',
'branch_time_in_parent',
'experiment',
'experiment_id',
'frequency',
'grid',
'grid_label',
'institution_id',
'nominal_resolution',
'parent_activity_id',
'parent_experiment_id',
'parent_source_id',
'parent_time_units',
'parent_variant_label',
'realm',
'product',
'source_id',
'source_type',
'sub_experiment',
'sub_experiment_id',
'table_id',
'variable_id',
'variant_label',
}
)
)

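The rewrite above is pyupgrade's set-literal transformation: `set([...])` becomes `{...}` with no change in behaviour. A minimal standalone illustration, using a hypothetical two-key subset:

    # Hypothetical two-key illustration of the rewrite applied in parse_cmip6:
    # both expressions produce the same sorted list, but the set literal skips
    # building an intermediate list and the extra set() call.
    old_style = sorted(list(set(['activity_id', 'experiment_id'])))
    new_style = sorted({'activity_id', 'experiment_id'})
    assert old_style == new_style == ['activity_id', 'experiment_id']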
3 changes: 3 additions & 0 deletions pyproject.toml
@@ -2,3 +2,6 @@
line-length = 100
target-version = ['py38']
skip-string-normalization = true

[build-system]
requires = ["setuptools>=45", "wheel", "setuptools_scm>=6.2"]
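The new [build-system] table declares the PEP 518 build requirements, with setuptools_scm deriving the package version from git tags. A minimal sketch of inspecting that version, assuming setuptools_scm>=6.2 is installed and the repository has at least one tag:

    # Minimal sketch, assuming setuptools_scm>=6.2 is installed and git tags exist.
    from setuptools_scm import get_version

    # Compute the version string the build backend would embed, using the
    # post-release and dirty-tag schemes configured in setup.py.
    print(get_version(version_scheme='post-release', local_scheme='dirty-tag'))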
4 changes: 4 additions & 0 deletions setup.cfg
@@ -16,3 +16,7 @@ line_length=100
skip=
docs/source/conf.py
setup.py

[tool:pytest]
console_output_style = count
addopts = -n auto --cov=./ --cov-report=xml --verbose
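With the new [tool:pytest] block, a bare pytest invocation picks up those addopts automatically; the explicit equivalent, assuming pytest-xdist (for `-n auto`) and pytest-cov (for the coverage flags) are installed, would be:

    # Explicit equivalent of the configured addopts; assumes pytest-xdist and
    # pytest-cov are installed, since they provide -n and --cov respectively.
    import pytest

    pytest.main(['-n', 'auto', '--cov=./', '--cov-report=xml', '--verbose'])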
1 change: 0 additions & 1 deletion setup.py
@@ -46,6 +46,5 @@
'version_scheme': 'post-release',
'local_scheme': 'dirty-tag',
},
setup_requires=['setuptools_scm', 'setuptools>=30.3.0'],
zip_safe=False,
)
4 changes: 2 additions & 2 deletions tests/test_builder.py
@@ -108,7 +108,7 @@ def test_parse_invalid_assets():
b = Builder(sample_data_dir / 'cesm').build(parsing_func=parsing_func_errors)

assert not b.invalid_assets.empty
assert set(b.invalid_assets.columns) == set([Builder.INVALID_ASSET, Builder.TRACEBACK])
assert set(b.invalid_assets.columns) == {Builder.INVALID_ASSET, Builder.TRACEBACK}


def test_save(tmp_path):
@@ -123,6 +123,6 @@ def test_save(tmp_path):

json_path = tmp_path / 'test_catalog.json'
data = json.load(json_path.open())
assert set(['catalog_file', 'assets', 'aggregation_control', 'attributes']).issubset(
assert {'catalog_file', 'assets', 'aggregation_control', 'attributes'}.issubset(
set(data.keys())
)
