
Commit

Merge pull request #7 from fmi-faim/improve-api
Improve API and tests
imagejan authored Jul 23, 2024
2 parents 6b78015 + f6a59d2 commit 6ca6024
Showing 3 changed files with 150 additions and 22 deletions.
25 changes: 14 additions & 11 deletions pixi.lock

Some generated files are not rendered by default.

47 changes: 42 additions & 5 deletions src/mobiedantic/__init__.py
@@ -54,6 +54,26 @@ def initialize_with_paths(
        data_format: str = 'ome.zarr',
    ) -> None:
        sources = {}
        self._update_sources(
            sources=sources,
            path_dict=path_dict,
            channel_index=channel_index,
            data_format=data_format,
        )
        views_dict = {'default': {'uiSelectionGroup': 'view', 'isExclusive': True}}
        self.model = DatasetSchema(
            is2D=is2d,
            sources=sources,
            views=views_dict,
        )

    def _update_sources(
        self,
        sources: dict[str, Source],
        path_dict: dict[str, Path],
        channel_index: int = 0,
        data_format: str = 'ome.zarr',
    ):
        for name in path_dict:
            try:
                source_path = {
@@ -77,11 +97,19 @@
                }
            }
            sources[name] = Source(**data)
        views_dict = {'default': {'uiSelectionGroup': 'view', 'isExclusive': True}}
        self.model = DatasetSchema(
            is2D=is2d,
            sources=sources,
            views=views_dict,

    def add_sources(
        self,
        path_dict: dict[str, Path],
        *,
        channel_index: int = 0,
        data_format: str = 'ome.zarr',
    ):
        self._update_sources(
            sources=self.model.sources,
            path_dict=path_dict,
            channel_index=channel_index,
            data_format=data_format,
        )

    def add_merged_grid(
@@ -151,6 +179,15 @@ def new_dataset(
        self.model.defaultDataset = name
        return Dataset(path=dataset_folder)

    def load(self):
        project_path = self.path / 'project.json'
        if not project_path.exists():
            message = f'Project file not found: {project_path}'
            raise ValueError(message)
        with open(project_path) as project_file:
            data = json.loads(project_file.read())
        self.model = ProjectSchema(**data)

    def save(self, *, create_directory: bool = True):
        if self.model is None:
            message = 'Project not initialized.'
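For illustration, the methods introduced above (`_update_sources`, `add_sources`, and `load`) could be used roughly as follows. This is a minimal sketch based on the diff and on the tests below; the project root, dataset name, and OME-Zarr well paths are made up for the example.

from pathlib import Path

from mobiedantic import Dataset, Project

# Hypothetical project root and well paths, for illustration only.
root = Path('/tmp/example_project')
project = Project(root)
project.initialize_model(description='Example project')

dataset = project.new_dataset('plate_1')
dataset.initialize_with_paths(
    path_dict={'A01': Path('../../plate.zarr/A/01/0')},
    is2d=False,
)
# New in this commit: append more sources to an already initialized dataset.
dataset.add_sources(
    path_dict={'A02': Path('../../plate.zarr/A/02/0')},
    channel_index=0,
    data_format='ome.zarr',
)
dataset.save()
project.save()

# Also new: reload a previously saved project and dataset from disk.
project_loaded = Project(root)
project_loaded.load()
dataset_loaded = Dataset(root / 'plate_1')
dataset_loaded.load()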
100 changes: 94 additions & 6 deletions tests/test_api.py
@@ -1,6 +1,8 @@
from pathlib import Path

from mobiedantic import Project
import pytest

from mobiedantic import Dataset, Project


def test_project(tmp_path):
@@ -15,7 +17,7 @@ def test_dataset(tmp_path):
def test_dataset(tmp_path):
    project = Project(tmp_path)
    project.initialize_model(description='Testing datasets')
    dataset1 = project.new_dataset('Dataset_1')
    dataset1: Dataset = project.new_dataset('Dataset_1')
    sources1 = {
        'A01': Path('../../non-existent.zarr/A/01/0'),
        'A02': Path('../../non-existent.zarr/A/02/0'),
@@ -44,14 +46,100 @@ def test_dataset(tmp_path):
    assert (
        len(dataset1.model.views['default'].sourceTransforms[0].mergedGrid.sources) == 2
    )

    dataset2 = project.new_dataset('Dataset_2')
    sources2 = {
        'C01': Path('../../non-existent.zarr/C/01/0'),
        'C02': Path('../../non-existent.zarr/C/02/0'),
    }
    dataset2.initialize_with_paths(
    dataset1.add_sources(
        path_dict=sources2,
        is2d=False,
        channel_index=0,
        data_format='ome.zarr',
    )
    dataset1.add_merged_grid(
        name='Merged_grid_view_2',
        sources=list(sources2),
    )
    assert len(dataset1.model.sources) == 4
    assert len(dataset1.model.views['default'].sourceDisplays) == 2
    assert (
        dataset1.model.views['default'].sourceDisplays[1].imageDisplay.name.root
        == 'Merged_grid_view_2'
    )
    assert len(dataset1.model.views['default'].sourceTransforms) == 2
    assert (
        dataset1.model.views['default']
        .sourceTransforms[1]
        .mergedGrid.mergedGridSourceName.root
        == 'Merged_grid_view_2'
    )
    assert (
        len(dataset1.model.views['default'].sourceTransforms[1].mergedGrid.sources) == 2
    )
    project.new_dataset('Dataset_2')
    assert len(project.model.datasets) == 2


def test_save_and_load(tmp_path):
    project = Project(tmp_path)
    project.initialize_model(description='Testing saving and loading')
    dataset_name = 'dataset1'
    dataset = project.new_dataset(dataset_name)
    sources = {
        'A01': '/path/to/source',
    }
    dataset.initialize_with_paths(
        path_dict=sources,
        is2d=True,
    )
    dataset.save()
    project.save()

    project_loaded = Project(tmp_path)
    project_loaded.load()

    assert project.model == project_loaded.model

    dataset_loaded = Dataset(tmp_path / dataset_name)
    dataset_loaded.load()

    assert dataset.model == dataset_loaded.model


def test_dataset_errors(tmp_path):
    filename = 'dataset.json'
    with open(tmp_path / filename, 'w'):
        pass
    with pytest.raises(ValueError, match="'path' needs to point to a directory"):
        Dataset(path=(tmp_path / filename))
    dataset_dir = tmp_path / 'dataset'
    dataset = Dataset(path=dataset_dir)
    with pytest.raises(ValueError, match='Dataset not initialized.'):
        dataset.save()
    sources = {
        'A01': '/path/to/source',
    }
    dataset.initialize_with_paths(
        path_dict=sources,
        is2d=True,
    )
    with pytest.raises(
        ValueError, match="Dataset folder doesn't exist yet and may not be created."
    ):
        dataset.save(create_directory=False)
    with pytest.raises(ValueError, match='Dataset file not found'):
        dataset.load()


def test_project_errors(tmp_path):
    project = Project(tmp_path / 'non-existent_subfolder')
    with pytest.raises(ValueError, match='Project not initialized'):
        project.save()
    with pytest.raises(ValueError, match='Project file not found'):
        project.load()
    with pytest.raises(ValueError, match='Project not initialized'):
        project.new_dataset('dataset1')
    project.initialize_model(description='Test raising errors.')
    with pytest.raises(
        ValueError, match="Project folder doesn't exist yet and may not be created."
    ):
        project.save(create_directory=False)
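Assuming the file name read by Project.load() and the 'dataset.json' name used in the error-handling test above, a saved project from test_save_and_load would presumably look roughly like this on disk (layout inferred, not part of the diff):

example_project/
├── project.json      # serialized ProjectSchema (datasets, defaultDataset)
└── dataset1/
    └── dataset.json  # serialized DatasetSchema (sources, views)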
