Fixes for CSLC static layers filtering/stitching. Burst to frame mapping. (#50)

* better check for downsampled stitching

* add burst-to-frame functions

* fix cslc static download filtering
scottstanie authored Jul 16, 2024
1 parent ad499a1 commit 9e6d77c
Showing 4 changed files with 57 additions and 4 deletions.
48 changes: 48 additions & 0 deletions src/opera_utils/burst_frame_db.py
@@ -166,3 +166,51 @@ def get_burst_ids_for_frame(
"""
frame_data = get_frame_to_burst_mapping(frame_id, json_file)
return frame_data["burst_ids"]


def get_burst_to_frame_mapping(
burst_id: str, json_file: Optional[PathOrStr] = None
) -> dict:
"""Get the burst data for one burst ID.
Parameters
----------
burst_id : str
The ID of the burst to get the frame IDs for.
json_file : PathOrStr, optional
The path to the JSON file containing the burst-to-frame mapping.
If `None`, uses the zip file fetched from `datasets`
Returns
-------
dict
The burst data for the given burst ID.
"""
if json_file is None:
json_file = datasets.fetch_burst_to_frame_mapping_file()
js = read_zipped_json(json_file)
return js["data"][burst_id.lower().replace("-", "_")]


def get_frame_ids_for_burst(
burst_id: str, json_file: Optional[PathOrStr] = None
) -> list[int]:
"""Get the frame IDs for one burst ID.
Parameters
----------
burst_id : str
The ID of the burst to get the frame IDs for.
json_file : PathOrStr, optional
The path to the JSON file containing the burst-to-frame mapping.
If `None`, fetches the remote zip file from `datasets`
Returns
-------
list[int]
The frame IDs for the given burst ID.
Most burst IDs have 1, but burst IDs in the overlap are in
2 frames.
"""
burst_data = get_burst_to_frame_mapping(burst_id, json_file)
return burst_data["frame_ids"]
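
A brief usage sketch of the two new helpers (the burst ID below is a hypothetical example, not taken from the diff; since the lookup normalizes IDs with .lower().replace("-", "_"), dashed or underscored forms should both resolve):

from opera_utils.burst_frame_db import (
    get_burst_to_frame_mapping,
    get_frame_ids_for_burst,
)

burst_id = "t042-012345-iw1"  # hypothetical burst ID

frame_ids = get_frame_ids_for_burst(burst_id)      # one frame ID, or two in the frame overlap
burst_data = get_burst_to_frame_mapping(burst_id)  # full record, including "frame_ids"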
9 changes: 6 additions & 3 deletions src/opera_utils/download.py
@@ -216,15 +216,18 @@ def _download_for_burst_ids(
     list[Path]
         Locations to saved raster files.
     """
-    logger.info(f"Searching {len(burst_ids)} for {product} (Dates:{start} to {end})")
+    logger.info(
+        f"Searching {len(burst_ids)} bursts, {product=} (Dates: {start} to {end})"
+    )
     results = asf.search(
         operaBurstID=list(burst_ids),
         processingLevel=product.value,
         start=start,
         end=end,
     )
-    logger.debug(f"Found {len(results)} total results before deduping pgeVersion")
-    results = filter_results_by_date_and_version(results)
+    if product == L2Product.CSLC:
+        logger.debug(f"Found {len(results)} total results before deduping pgeVersion")
+        results = filter_results_by_date_and_version(results)
     logger.info(f"Found {len(results)} results")
     session = _get_auth_session()
     urls = _get_urls(results)
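
The body of filter_results_by_date_and_version is not shown in this hunk; purely as an illustration of the deduplication step that now runs only for CSLC products, a standalone sketch (using assumed dict field names rather than real asf_search result objects) could look like:

def keep_latest_pge_version(results: list[dict]) -> list[dict]:
    # Keep one result per (burst ID, acquisition date), preferring the highest pgeVersion.
    # Field names and plain string version comparison are simplifying assumptions.
    best: dict[tuple, dict] = {}
    for result in results:
        key = (result["operaBurstID"], result["startTime"][:10])
        if key not in best or result["pgeVersion"] > best[key]["pgeVersion"]:
            best[key] = result
    return list(best.values())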
1 change: 1 addition & 0 deletions src/opera_utils/geometry.py
@@ -82,6 +82,7 @@ def create_geometry_files(

     if not burst_ids:
         raise ValueError("Must provide frame_id or burst_ids")
+    logger.debug("Using burst IDs: %s", burst_ids)
 
     output_path = Path(output_dir)
     output_path.mkdir(exist_ok=True, parents=True)
3 changes: 2 additions & 1 deletion src/opera_utils/stitching.py
@@ -89,6 +89,7 @@ def merge_images(
logger.info(f"Overwrite=True: removing {outfile}")
Path(outfile).unlink()

is_downsampled = strides is not None and strides["x"] > 1 and strides["y"] > 1
if len(file_list) == 1:
logger.info("Only one image, no stitching needed")
logger.info(f"Copying {file_list[0]} to {outfile} and zeroing nodata values.")
@@ -106,7 +107,7 @@
     # If not, warp them to the most common projection using VRT files in a tempdir
     temp_dir = tempfile.TemporaryDirectory()
 
-    if strides is not None and strides["x"] > 1 and strides["y"] > 1:
+    if is_downsampled:
         file_list = get_downsampled_vrts(
             file_list,
             strides=strides,
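
For reference, the hoisted is_downsampled flag simply evaluates the existing strides check once; with some hypothetical strides mappings, the check (which requires both axes to be decimated) behaves as follows:

for strides in [None, {"x": 1, "y": 1}, {"x": 6, "y": 3}]:
    is_downsampled = strides is not None and strides["x"] > 1 and strides["y"] > 1
    print(strides, is_downsampled)  # None -> False, then False, then True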
