Commit

reformat code
rcannood committed Aug 28, 2024
1 parent df88a53 commit 0ee9657
Showing 1 changed file with 48 additions and 45 deletions.
93 changes: 48 additions & 45 deletions src/data_processors/crop_region/script.py
@@ -4,13 +4,13 @@

## VIASH START
par = {
-    "input": "temp/datasets/10x_xenium/2023_10x_mouse_brain_xenium/full_dataset.zarr",
-    "output": "resources_test/common/2023_10x_mouse_brain_xenium/dataset.zarr",
-    "replicate_id": ["rep1", "rep2", "rep3"],
-    "min_x": [10000, 10000, 10000],
-    "max_x": [12000, 12000, 12000],
-    "min_y": [10000, 10000, 10000],
-    "max_y": [12000, 12000, 12000],
+    "input": "temp/datasets/10x_xenium/2023_10x_mouse_brain_xenium/full_dataset.zarr",
+    "output": "resources_test/common/2023_10x_mouse_brain_xenium/dataset.zarr",
+    "replicate_id": ["rep1", "rep2", "rep3"],
+    "min_x": [10000, 10000, 10000],
+    "max_x": [12000, 12000, 12000],
+    "min_y": [10000, 10000, 10000],
+    "max_y": [12000, 12000, 12000],
}
## VIASH END

@@ -28,44 +28,47 @@
sdata_out = []

for i, replicate_id in enumerate(par["replicate_id"]):
-    print(f"Processing replicate '{replicate_id}'", flush=True)
-
-    min_x = par["min_x"][i]
-    max_x = par["max_x"][i]
-    min_y = par["min_y"][i]
-    max_y = par["max_y"][i]
-
-    print(f"  Cropping to region: {min_x}, {min_y}, {max_x}, {max_y}", flush=True)
-    sdata_query = sdata.query.bounding_box(
-        axes=["x", "y"],
-        min_coordinate=[min_x, min_y],
-        max_coordinate=[max_x, max_y],
-        target_coordinate_system=f"{replicate_id}_global",
-        filter_table=True
-    )
-
-    # process the segmentations
-    for segmentation_id in segmentation_ids:
-        print(f"  Processing replicate '{replicate_id}' segmentation '{segmentation_id}'", flush=True)
-        shape_name = f"{replicate_id}_{segmentation_id}_boundaries"
-        table_name = f"{replicate_id}_{segmentation_id}_table"
-
-        if shape_name not in sdata_query.shapes:
-            print(f"  Shape '{shape_name}' not found in sdata, skipping", flush=True)
-            continue
-
-        if table_name not in sdata.tables:
-            print(f"  Table '{table_name}' not found in sdata, skipping", flush=True)
-            continue
-
-        obs_index = sdata_query.shapes[shape_name].index
-        table_query = sdata.tables[table_name][obs_index]
-
-        # add filtered table to sdata_query
-        sdata_query.tables[table_name] = table_query
-
-    # add filtered shapes to sdata_query
-    sdata_out.append(sdata_query)
+    print(f"Processing replicate '{replicate_id}'", flush=True)
+
+    min_x = par["min_x"][i]
+    max_x = par["max_x"][i]
+    min_y = par["min_y"][i]
+    max_y = par["max_y"][i]
+
+    print(f"  Cropping to region: {min_x}, {min_y}, {max_x}, {max_y}", flush=True)
+    sdata_query = sdata.query.bounding_box(
+        axes=["x", "y"],
+        min_coordinate=[min_x, min_y],
+        max_coordinate=[max_x, max_y],
+        target_coordinate_system=f"{replicate_id}_global",
+        filter_table=True,
+    )
+
+    # process the segmentations
+    for segmentation_id in segmentation_ids:
+        print(
+            f"  Processing replicate '{replicate_id}' segmentation '{segmentation_id}'",
+            flush=True,
+        )
+        shape_name = f"{replicate_id}_{segmentation_id}_boundaries"
+        table_name = f"{replicate_id}_{segmentation_id}_table"
+
+        if shape_name not in sdata_query.shapes:
+            print(f"  Shape '{shape_name}' not found in sdata, skipping", flush=True)
+            continue
+
+        if table_name not in sdata.tables:
+            print(f"  Table '{table_name}' not found in sdata, skipping", flush=True)
+            continue
+
+        obs_index = sdata_query.shapes[shape_name].index
+        table_query = sdata.tables[table_name][obs_index]
+
+        # add filtered table to sdata_query
+        sdata_query.tables[table_name] = table_query
+
+    # add filtered shapes to sdata_query
+    sdata_out.append(sdata_query)

# concatenate the sdata objects
print("Concatenating sdata objects", flush=True)
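For context, a minimal sketch (not part of the commit) of the per-replicate crop the script performs, using the same sdata.query.bounding_box call shown in the diff; the zarr path, the "rep1_global" coordinate system name, and the coordinates below are illustrative placeholders rather than values taken from the dataset.

# Minimal sketch, assuming a SpatialData zarr store with a "rep1_global"
# coordinate system (placeholder names, not from the commit).
import spatialdata as sd

sdata = sd.read_zarr("full_dataset.zarr")  # placeholder input path

# Crop to a 2000x2000 region; filter_table=True drops table rows whose
# elements fall outside the bounding box.
cropped = sdata.query.bounding_box(
    axes=["x", "y"],
    min_coordinate=[10000, 10000],
    max_coordinate=[12000, 12000],
    target_coordinate_system="rep1_global",
    filter_table=True,
)

print(cropped)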
