Commit

Merge branch 'main' into cleaner_type_hints

ecomodeller authored Sep 5, 2024
2 parents 9d9b2c6 + b8d9cd2 commit 5c055e9
Showing 40 changed files with 520 additions and 143 deletions.
19 changes: 9 additions & 10 deletions .devcontainer/devcontainer.json
@@ -1,27 +1,26 @@
{
"name": "Python 3",
"image": "mcr.microsoft.com/devcontainers/python:1-3.11-bullseye",
"postCreateCommand": "pip3 install --user -e .'[dev,test,notebooks]'",
"customizations": {
"name": "Python 3",
"image": "mcr.microsoft.com/devcontainers/python:1-3.11-bullseye",
"postCreateCommand": "pip3 install --user -e .'[dev,test,notebooks]'",
"customizations": {
"vscode": {
"extensions": [
"ms-python.python",
"ms-toolsai.jupyter",
"GitHub.vscode-pull-request-github",
"charliermarsh.ruff"

],
"settings": {
"editor.formatOnSave": true,
"[python]": {
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll": true,
"source.organizeImports": true
"source.fixAll": true,
"source.organizeImports": true
}
},
"python.formatting.provider": "black"
},
"python.formatting.provider": "charliermarsh.ruff",
}
}
}
}
}
31 changes: 0 additions & 31 deletions .github/workflows/build_docs.yml

This file was deleted.

2 changes: 1 addition & 1 deletion .github/workflows/docs.yml
@@ -13,7 +13,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.9
python-version: 3.11

- name: Install dependencies
run: |
3 changes: 2 additions & 1 deletion .github/workflows/full_test.yml
@@ -23,7 +23,8 @@ jobs:
- uses: actions/checkout@v3
- uses: chartboost/ruff-action@v1 # Fail fast if there are any linting errors
with:
version: 0.5.6 # consistent with pyproject.toml ?
version: 0.6.2 # consistent with pyproject.toml ?
src: mikeio # ignore notebooks
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
4 changes: 4 additions & 0 deletions .vscode/settings.json
@@ -13,4 +13,8 @@
"mypy-type-checker.args": [
"--disallow-untyped-defs",
],
"[python]": {
"editor.formatOnSave": true,
"editor.defaultFormatter": "charliermarsh.ruff"
}
}
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
@@ -5,7 +5,7 @@
3. If you are fixing a bug, first add a failing test
4. Make changes
5. Verify that all tests passes by running `pytest` from the package root directory
6. Format the code by running [black (`pip install black==22.3.0`)](https://black.readthedocs.io/en/stable/) e.g. `black nameofthefiletoformat.py`
6. Format the code by running [`ruff format`](https://docs.astral.sh/ruff/formatter/)
6. Make a pull request with a summary of the changes

Tests can also be run with tools like VS Code
5 changes: 4 additions & 1 deletion Makefile
@@ -8,7 +8,10 @@ build: typecheck test
python -m build

lint:
ruff check .
ruff check mikeio

format:
ruff format $(LIB)/

pylint:
pylint --disable=all --enable=attribute-defined-outside-init mikeio/
10 changes: 10 additions & 0 deletions README.md
@@ -7,6 +7,16 @@
![OS](https://img.shields.io/badge/OS-Windows%20%7C%20Linux-blue)


> [!NOTE]
>
> ![](https://training.dhigroup.com/ACA_Logo.png?v=1647618443406)
>
> **Instructor-led course**
>
> [Getting started with MIKE IO for Python processing of dfs files](https://training.dhigroup.com/event/sessions?id=Instructor-led_course_-_Online852475847)
>
> From 4th September 2024 to 2nd October 2024
Read, write and manipulate dfs0, dfs1, dfs2, dfs3, dfsu and mesh files.

MIKE IO facilitates common data processing workflows for [MIKE files in Python](https://www.mikepoweredbydhi.com/products/mike-for-developers#io).
10 changes: 10 additions & 0 deletions docs/index.qmd
@@ -5,6 +5,16 @@
![Python version](https://img.shields.io/pypi/pyversions/mikeio.svg)
[![PyPI version](https://badge.fury.io/py/mikeio.svg)](https://badge.fury.io/py/mikeio)

::: callout-note
![](https://training.dhigroup.com/ACA_Logo.png?v=1647618443406)

**Instructor-led course**

[Getting started with MIKE IO for Python processing of dfs files](https://training.dhigroup.com/event/sessions?id=Instructor-led_course_-_Online852475847)

From 4th September 2024 to 2nd October 2024
:::

Read, write and manipulate dfs0, dfs1, dfs2, dfs3, dfsu and mesh files.

See our sister library [MIKE IO 1D](https://github.com/DHI/mikeio1d) for .res1d and .xns11 files.
4 changes: 2 additions & 2 deletions mikeio/__init__.py
@@ -21,9 +21,9 @@
# 'X.Y.dev0' is the canonical version of 'X.Y.dev'
#

__version__ = "2.1.dev0" # TODO use git hash instead for dev version?
__version__ = "2.1.0" # TODO use git hash instead for dev version?
# __version__ = "1.5.0"
__dfs_version__: int = 200
__dfs_version__: int = 210


if "64" not in architecture()[0]:
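The hunk above is cut off just before the body of the 32-bit guard. A minimal sketch of that guard pattern, assuming only what the visible line `if "64" not in architecture()[0]:` implies; the exception type and message below are placeholders, not mikeio's actual wording:

```python
from platform import architecture

# Sketch of the 32-bit guard started at the end of the hunk above;
# the exception and message are placeholders.
if "64" not in architecture()[0]:
    raise ImportError("A 64-bit Python interpreter is required.")
```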
5 changes: 1 addition & 4 deletions mikeio/_track.py
@@ -28,7 +28,6 @@ def _extract_track(
dtype: Any, # TODO DTypeLike?
data_read_func: Callable[[int, int], tuple[np.ndarray, float]],
) -> Dataset:

if not isinstance(geometry, GeometryFM2D):
raise NotImplementedError("Only implemented for 2d flexible mesh geometries")

@@ -65,9 +64,7 @@ def _extract_track(
assert isinstance(
times, pd.DatetimeIndex
), "The index must be a pandas.DatetimeIndex"
assert (
times.is_monotonic_increasing
), "The time index must be monotonic increasing. Consider df.sort_index() before passing to extract_track()."
assert times.is_monotonic_increasing, "The time index must be monotonic increasing. Consider df.sort_index() before passing to extract_track()."

data_list = [coords[:, 0], coords[:, 1]] # lon,lat
for item in range(n_items):
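The assertions shown above require the track's time index to be a `pandas.DatetimeIndex` that increases monotonically, and the message points to `df.sort_index()` as the fix. A small self-contained sketch, with made-up coordinates, of preparing such a track before passing it to something like `extract_track`:

```python
import pandas as pd

# Hypothetical track: timestamps are not in chronological order
track = pd.DataFrame(
    {"lon": [12.6, 12.4, 12.5], "lat": [55.1, 55.3, 55.2]},
    index=pd.DatetimeIndex(
        ["2024-09-05 02:00", "2024-09-05 00:00", "2024-09-05 01:00"]
    ),
)

assert isinstance(track.index, pd.DatetimeIndex)
assert not track.index.is_monotonic_increasing  # would trip the assertion above

track = track.sort_index()  # the fix suggested in the assertion message
assert track.index.is_monotonic_increasing
# track is now ready for e.g. dfsu.extract_track(track)
```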
2 changes: 0 additions & 2 deletions mikeio/dataset/_data_plot.py
@@ -59,7 +59,6 @@ def __call__(
def _get_ax(
ax: Axes | None = None, figsize: tuple[float, float] | None = None
) -> Axes:

if ax is None:
_, ax = plt.subplots(figsize=figsize)
return ax
@@ -68,7 +67,6 @@ def _get_fig_ax(
def _get_fig_ax(
ax: Axes | None = None, figsize: tuple[float, float] | None = None
) -> tuple[Figure, Axes]:

if ax is None:
fig, ax = plt.subplots(figsize=figsize)
else:
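`_get_ax` and `_get_fig_ax` implement the usual matplotlib pattern of reusing an `Axes` supplied by the caller and creating one only when none is given. A standalone sketch of how a plotting function built on that helper composes into an existing figure; `plot_series` is a hypothetical wrapper, not part of mikeio:

```python
import matplotlib.pyplot as plt
from matplotlib.axes import Axes


def _get_ax(ax: Axes | None = None, figsize=None) -> Axes:
    # create a new Axes only if the caller did not supply one
    if ax is None:
        _, ax = plt.subplots(figsize=figsize)
    return ax


def plot_series(values, ax: Axes | None = None) -> Axes:  # hypothetical wrapper
    ax = _get_ax(ax)
    ax.plot(values)
    return ax


# a standalone call creates its own figure...
plot_series([1, 3, 2])

# ...while passing an Axes lets the plot land in an existing subplot
fig, (ax1, ax2) = plt.subplots(ncols=2)
plot_series([1, 3, 2], ax=ax1)
plot_series([2, 1, 3], ax=ax2)
```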
15 changes: 9 additions & 6 deletions mikeio/dataset/_dataarray.py
@@ -219,7 +219,6 @@ def _guess_dims(
ndim_no_time = ndim if (len(dims) == 0) else ndim - 1

if isinstance(geometry, GeometryUndefined):

DIMS_MAPPING: Mapping[int, Sequence[Any]] = {
0: [],
1: ["x"],
@@ -964,7 +963,9 @@ def interp(

if interpolant is None:
interpolant = self.geometry.get_2d_interpolant(
coords, n_nearest=n_nearest, **kwargs # type: ignore
coords, # type: ignore
n_nearest=n_nearest,
**kwargs, # type: ignore
)
dai = self.geometry.interp2d(self, *interpolant).flatten() # type: ignore
if z is None:
@@ -1227,15 +1228,15 @@ def interp_like(

@staticmethod
def concat(
dataarrays: Sequence["DataArray"], keep: Literal["last"] = "last"
dataarrays: Sequence["DataArray"], keep: Literal["last", "first"] = "last"
) -> "DataArray":
"""Concatenate DataArrays along the time axis
Parameters
---------
dataarrays: sequence of DataArrays
keep: str, optional
TODO Yet to be implemented, default: last
keep: 'first' or 'last', optional
default: last
Returns
-------
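The updated signature above allows `keep="first"` in addition to the default `keep="last"`. A hedged usage sketch with hypothetical file names, assuming the usual pattern of pulling a `DataArray` out of the `Dataset` returned by `mikeio.read`:

```python
import mikeio

# Hypothetical: two dfs0 files covering consecutive periods; where
# timesteps overlap, keep="last" (the default) takes the values from
# the later DataArray in the sequence.
da1 = mikeio.read("part1.dfs0")[0]
da2 = mikeio.read("part2.dfs0")[0]
da = mikeio.DataArray.concat([da1, da2], keep="last")
```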
@@ -1505,7 +1506,9 @@ def aggregate(
if "name" in kwargs:
item.name = kwargs.pop("name")

with warnings.catch_warnings(): # there might be all-Nan slices, it is ok, so we ignore them!
with (
warnings.catch_warnings()
): # there might be all-Nan slices, it is ok, so we ignore them!
warnings.simplefilter("ignore", category=RuntimeWarning)
data = func(self.to_numpy(), axis=axis, keepdims=False, **kwargs)

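The `aggregate` hunk wraps the reduction in `warnings.catch_warnings()` because NumPy's nan-aware reductions emit a `RuntimeWarning` (e.g. "Mean of empty slice") when a slice is all-NaN, even though returning NaN for that slice is the intended result. A minimal sketch of the same suppression with made-up data:

```python
import warnings

import numpy as np

data = np.array([[np.nan, np.nan], [1.0, 3.0]])

# Without the filter, np.nanmean warns about the all-NaN first row;
# the NaN result for that row is fine, so the warning is silenced,
# as in aggregate() above.
with warnings.catch_warnings():
    warnings.simplefilter("ignore", category=RuntimeWarning)
    row_means = np.nanmean(data, axis=1)

print(row_means)  # [nan  2.]
```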