No more os.path
ecomodeller committed Sep 12, 2023
1 parent 60155a7 commit d64b9cf
Showing 5 changed files with 84 additions and 105 deletions.
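
The changes below all follow one pattern: path strings assembled with os.path become pathlib.Path objects. A rough before/after sketch of that pattern (illustrative only; the "data"/"big.dfs0" names here are placeholders, not code taken from the repository):

import os
from pathlib import Path

# Before: string paths assembled with os.path
old_style = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data", "big.dfs0")
found = os.path.exists(old_style)

# After: Path objects composed with "/" and queried directly
new_style = Path(__file__).parent / "data" / "big.dfs0"
found = new_style.exists()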
10 changes: 0 additions & 10 deletions docs/conf.py
@@ -4,16 +4,6 @@
 # list see the documentation:
 # https://www.sphinx-doc.org/en/master/usage/configuration.html

-# -- Path setup --------------------------------------------------------------
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#
-# import os
-# import sys
-# sys.path.insert(0, os.path.abspath('.'))
-

 # -- Project information -----------------------------------------------------

48 changes: 20 additions & 28 deletions tests/notebooks/test_notebooks.py
@@ -1,55 +1,47 @@
-import os
-import subprocess
+from pathlib import Path
+from typing import List


 import nbformat
 from nbconvert.preprocessors import ExecutePreprocessor
 from nbconvert.preprocessors import CellExecutionError

-_TEST_DIR = os.path.dirname(os.path.abspath(__file__))
-PARENT_DIR = os.path.join(_TEST_DIR, "../..")
-SKIP_LIST = []


-def _process_notebook(notebook_filename, notebook_path="notebooks"):
+def _process_notebook(fp: Path):
     """Checks if an IPython notebook runs without error from start to finish. If so, writes the notebook to HTML (with outputs) and overwrites the .ipynb file (without outputs)."""
-    with open(notebook_filename) as f:
+    with open(fp) as f:
         nb = nbformat.read(f, as_version=4)

     ep = ExecutePreprocessor(timeout=600, kernel_name="python3")

     try:
         # Check that the notebook runs
-        ep.preprocess(nb, {"metadata": {"path": notebook_path}})
+        ep.preprocess(nb, {"metadata": {"path": "notebooks"}})
     except CellExecutionError as e:
-        print(f"Failed executing {notebook_filename}")
+        print(f"Failed executing {fp}")
         print(e)
         raise

-    print(f"Successfully executed {notebook_filename}")
+    print(f"Successfully executed {fp}")
     return


-def _get_all_notebooks_in_repo(skip=[]):
-    """Get all files .ipynb included in the git repository"""
-    git_files = (
-        subprocess.check_output(
-            "git ls-tree --full-tree --name-only -r HEAD", shell=True
-        )
-        .decode("utf-8")
-        .splitlines()
-    )
+def _get_all_notebooks_in_repo() -> List[Path]:
+    ROOT_DIR = Path(__file__).parent.parent.parent
+    NOTEBOOK_DIR = ROOT_DIR / "notebooks"

-    return [
-        fn
-        for fn in git_files
-        if fn.endswith(".ipynb") and not any(s in fn for s in skip)
-    ]
+    return list(NOTEBOOK_DIR.glob("*.ipynb"))


 def test_notebook(notebook):
-    _process_notebook(os.path.join(PARENT_DIR, notebook))
+    _process_notebook(notebook)


 def pytest_generate_tests(metafunc):
-    notebooks = _get_all_notebooks_in_repo(skip=SKIP_LIST)
+    notebooks = _get_all_notebooks_in_repo()
     metafunc.parametrize("notebook", notebooks)


 if __name__ == "__main__":
     notebooks = _get_all_notebooks_in_repo()
     for notebook in notebooks:
         print(notebook)
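
One behavioural note on the new helper: Path.glob("*.ipynb") only matches notebooks sitting directly in notebooks/, whereas the removed git ls-tree call listed tracked .ipynb files anywhere in the repository. If notebooks in subfolders ever need to be collected again, a recursive glob would restore that; a minimal sketch (the helper name is hypothetical, not part of this commit):

from pathlib import Path
from typing import List


def _get_all_notebooks_recursive() -> List[Path]:
    # rglob descends into subdirectories, unlike the non-recursive glob used above
    root = Path(__file__).parent.parent.parent
    return sorted((root / "notebooks").rglob("*.ipynb"))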
14 changes: 7 additions & 7 deletions tests/performance/test_performance_dfs0.py
@@ -1,12 +1,12 @@
-import os
+from pathlib import Path
 import numpy as np
 import pandas as pd
 import mikeio


-def test_write_read_long_dfs0(tmpdir):
+def test_write_read_long_dfs0(tmp_path):

-    filename = os.path.join(tmpdir.dirname, "big.dfs0")
+    filename = tmp_path / "big.dfs0"

     nt = 10_000_000
     data = np.random.random([nt])
@@ -15,16 +15,16 @@ def test_write_read_long_dfs0(tmpdir):
     )
     da.to_dfs(filename)

-    assert os.path.exists(filename)
+    assert filename.exists()

     ds = mikeio.read(filename)

     assert len(ds.time) == nt


-def test_write_read_many_items_dataset_pandas(tmpdir):
+def test_write_read_many_items_dataset_pandas(tmp_path):

-    filename = os.path.join(tmpdir.dirname, "many_items.dfs0")
+    filename = tmp_path / "many_items.dfs0"

     n_items = 10_000
     nt = 200
@@ -38,7 +38,7 @@ def test_write_read_many_items_dataset_pandas(tmpdir):
     ds = mikeio.Dataset(das)
     ds.to_dfs(filename)

-    assert os.path.exists(filename)
+    assert filename.exists()

     # read to dataset
     ds = mikeio.read(filename)
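
A note on the fixture swap in these tests: tmp_path is a pathlib.Path created fresh for each test by pytest, while the old tmpdir.dirname pointed at the parent of the per-test directory, i.e. a location shared across tests in the same run. A minimal usage sketch (hypothetical file name, not from the commit):

def test_tmp_path_is_unique_per_test(tmp_path):
    # pytest injects tmp_path as a pathlib.Path unique to this test
    target = tmp_path / "example.txt"
    target.write_text("placeholder")
    assert target.exists()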
27 changes: 13 additions & 14 deletions tests/test_mesh.py
@@ -40,14 +40,14 @@ def test_read_mixed_mesh(mixed_mesh):
     assert np.all(el_tbl_vec < msh.n_nodes)


-def test_read_write_mixed_mesh(mixed_mesh, tmpdir):
+def test_read_write_mixed_mesh(mixed_mesh, tmp_path):
     msh = mixed_mesh
-    outfilename = os.path.join(tmpdir.dirname, "quad_tri_v2.mesh")
+    outfilename = tmp_path / "quad_tri_v2.mesh"
     msh.write(outfilename)

     msh2 = Mesh(outfilename)

-    assert os.path.exists(outfilename)
+    assert outfilename.exists()

     assert np.all(np.hstack(msh2.element_table) == np.hstack(msh.element_table))
     assert np.all(msh2.element_coordinates == msh.element_coordinates)
@@ -78,7 +78,6 @@ def test_get_bad_node_coordinates(tri_mesh):


 def test_set_z(tri_mesh):
-    os.path.join("tests", "testdata", "odense_rough.mesh")
     msh = tri_mesh
     zn = msh.node_coordinates[:, 2]
     zn[zn < -3] = -3
@@ -107,35 +106,35 @@ def test_set_codes(tri_mesh):
         msh.geometry.codes = codes[0:4]


-def test_write(tri_mesh, tmpdir):
-    outfilename = os.path.join(tmpdir.dirname, "simple.mesh")
+def test_write(tri_mesh, tmp_path):
+    outfilename = tmp_path / "simple.mesh"
     msh = tri_mesh

     msh.write(outfilename)

-    assert os.path.exists(outfilename)
+    assert outfilename.exists()


-def test_write_part(tri_mesh, tmpdir):
-    outfilename = os.path.join(tmpdir.dirname, "simple_sub.mesh")
+def test_write_part(tri_mesh, tmp_path):
+    outfilename = tmp_path / "simple_sub.mesh"

     msh = tri_mesh

     msh.write(outfilename, elements=list(range(0, 100)))

-    assert os.path.exists(outfilename)
+    assert outfilename.exists()


-def test_write_mesh_from_dfsu(tmpdir):
-    outfilename = os.path.join(tmpdir.dirname, "quad_tri.mesh")
-    dfsufilename = os.path.join("tests", "testdata", "FakeLake.dfsu")
+def test_write_mesh_from_dfsu(tmp_path):
+    outfilename = tmp_path / "quad_tri.mesh"
+    dfsufilename = "tests/testdata/FakeLake.dfsu"

     msh = Mesh(dfsufilename)

     msh.write(outfilename)

     msh2 = Mesh(outfilename)

-    assert os.path.exists(outfilename)
+    assert outfilename.exists()

     assert np.all(np.hstack(msh2.element_table) == np.hstack(msh.element_table))
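
As in the other files, the Path objects from tmp_path are passed straight to msh.write(...) and Mesh(...), so the mesh API is exercised with os.PathLike arguments as well as plain strings (dfsufilename above stays a string). A small round-trip sketch mirroring test_write_mesh_from_dfsu, assuming a local mikeio install and the FakeLake.dfsu test file:

from pathlib import Path
from mikeio import Mesh

msh = Mesh("tests/testdata/FakeLake.dfsu")  # plain string path
out = Path("quad_tri.mesh")                 # Path object
msh.write(out)
assert out.exists()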
