Commit: testing pixel masking

sliu008 committed Dec 10, 2024
1 parent a3b2827 commit 8dcf529
Showing 3 changed files with 30 additions and 8 deletions.
podaac/subsetter/run_subsetter.py (2 changes: 1 addition, 1 deletion)
@@ -66,7 +66,7 @@ def parse_args(args: list) -> tuple:
     )
     parser.add_argument(
         '--cut',
-        default=False,
+        default=True,
         action='store_true',
         help='If provided, scanline will be cut'
     )
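One note on this flag change: with argparse's `store_true` action, `default=True` means `args.cut` comes out True whether or not `--cut` is passed, so the command line can no longer turn cutting off. A minimal standalone sketch of that behavior (illustration only, not part of the commit):

```python
import argparse

# Mirrors the '--cut' argument as defined after this commit (illustration only).
parser = argparse.ArgumentParser()
parser.add_argument(
    '--cut',
    default=True,
    action='store_true',
    help='If provided, scanline will be cut'
)

print(parser.parse_args([]).cut)         # True: the new default applies even without the flag
print(parser.parse_args(['--cut']).cut)  # True: passing the flag still yields True
```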
podaac/subsetter/subset.py (4 changes: 3 additions, 1 deletion)
@@ -979,7 +979,9 @@ def subset_with_bbox(dataset: xr.Dataset,  # pylint: disable=too-many-branches
                 (group_dataset[lat_var_name] >= lat_bounds[0]) &
                 (group_dataset[lat_var_name] <= lat_bounds[1]) &
                 temporal_cond,
-                cut
+                cut,
+                longitude=lon_var_name,
+                latitude=lat_var_name
             )

             datasets.append(group_dataset)
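For readers less familiar with this pattern: the call above builds a boolean longitude/latitude/temporal condition and hands it, together with the coordinate variable names, to the enhanced `where` helper shown in the next file. A rough standalone sketch of how such a bounding-box condition masks a dataset with plain xarray (toy variable names, not project code):

```python
import numpy as np
import xarray as xr

# Toy swath-style dataset with 2-D lon/lat fields (names invented for the sketch).
ds = xr.Dataset(
    {
        "sst": (("y", "x"), np.arange(12.0).reshape(3, 4)),
        "lon": (("y", "x"), np.linspace(-10.0, 10.0, 12).reshape(3, 4)),
        "lat": (("y", "x"), np.linspace(30.0, 40.0, 12).reshape(3, 4)),
    }
)
lon_bounds, lat_bounds = (-5.0, 5.0), (32.0, 38.0)

# Boolean condition analogous to the lon/lat checks in subset_with_bbox above.
cond = (
    (ds["lon"] >= lon_bounds[0]) & (ds["lon"] <= lon_bounds[1]) &
    (ds["lat"] >= lat_bounds[0]) & (ds["lat"] <= lat_bounds[1])
)

masked = ds.where(cond)  # values outside the box become NaN; dimensions are unchanged
```

With `cut=True` the helper additionally trims away fully excluded slices via index selection (see the `indexed_ds = dataset.isel(**indexers)` line in the next file) rather than leaving all-NaN rows.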
podaac/subsetter/xarray_enhancements.py (32 changes: 26 additions, 6 deletions)
@@ -190,7 +190,7 @@ def get_variables_with_indexers(dataset, indexers):
     return subset_vars, no_subset_vars


-def where(dataset: xr.Dataset, cond: Union[xr.Dataset, xr.DataArray], cut: bool) -> xr.Dataset:
+def where(dataset: xr.Dataset, cond: Union[xr.Dataset, xr.DataArray], cut: bool, longitude=None, latitude=None, pixel_subset=False) -> xr.Dataset:
     """
     Return a dataset which meets the given condition.
@@ -238,13 +238,30 @@ def where(dataset: xr.Dataset, cond: Union[xr.Dataset, xr.DataArray], cut: bool)
     indexed_ds = dataset.isel(**indexers)
     subset_vars, non_subset_vars = get_variables_with_indexers(dataset, indexers)

-    # dataset with variables that need to be subsetted
-    new_dataset_sub = indexed_ds[subset_vars].where(indexed_cond)
-    # data with variables that shouldn't be subsetted
+    # Variables to subset
+    subset_vars_to_process = (
+        [var for var in subset_vars if var not in {longitude, latitude}]
+        if pixel_subset
+        else subset_vars
+    )
+
+    print(subset_vars_to_process)
+    print(non_subset_vars)
+    print(longitude)
+    print(latitude)
+    print(list(indexed_ds.variables))
+    new_dataset_sub = indexed_ds[subset_vars_to_process].where(indexed_cond)
+
+    # Variables to retain without subsetting
     new_dataset_non_sub = indexed_ds[non_subset_vars]

-    # merge the datasets
-    new_dataset = xr.merge([new_dataset_non_sub, new_dataset_sub])
+    # Add longitude and latitude if pixel_subset is True
+    datasets_to_merge = [new_dataset_non_sub, new_dataset_sub]
+    if pixel_subset:
+        datasets_to_merge.append(indexed_ds[[longitude, latitude]])
+
+    # Merge datasets
+    new_dataset = xr.merge(datasets_to_merge)

     # Cast all variables to their original type
     for variable_name, variable in new_dataset.data_vars.items():
@@ -306,3 +323,6 @@ def where(dataset: xr.Dataset, cond: Union[xr.Dataset, xr.DataArray], cut: bool)

     dc.sync_dims_inplace(dataset, new_dataset)
     return new_dataset
+
+
+
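The heart of the pixel-masking path added above is a split-and-merge: apply the condition to the data variables, but merge the full longitude and latitude fields back in unmasked so the pixel geometry is preserved. A rough standalone sketch of that pattern with plain xarray (toy variable names, not the project's `where` helper):

```python
import numpy as np
import xarray as xr

ds = xr.Dataset(
    {
        "sst": (("y", "x"), np.arange(12.0).reshape(3, 4)),
        "lon": (("y", "x"), np.linspace(-10.0, 10.0, 12).reshape(3, 4)),
        "lat": (("y", "x"), np.linspace(30.0, 40.0, 12).reshape(3, 4)),
    }
)
cond = (ds["lon"] > 0.0) & (ds["lat"] < 38.0)

# Mask only the non-coordinate variables...
masked_data = ds[["sst"]].where(cond)
# ...then merge the untouched lon/lat back in, mirroring datasets_to_merge above.
pixel_masked = xr.merge([masked_data, ds[["lon", "lat"]]])

print(pixel_masked["sst"].isnull().sum().item())  # some pixels masked to NaN
print(pixel_masked["lon"].isnull().sum().item())  # 0: longitude kept everywhere
```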