diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 83f7a01e4..21b70b30a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,14 +12,13 @@ on: jobs: call-version-info-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-version-info.yml@v0.6.0 + uses: ASFHyP3/actions/.github/workflows/reusable-version-info.yml@v0.7.0 with: - conda_env_name: RAiDER python_version: '3.10' call-docker-ghcr-workflow: needs: call-version-info-workflow - uses: ASFHyP3/actions/.github/workflows/reusable-docker-ghcr.yml@v0.6.0 + uses: ASFHyP3/actions/.github/workflows/reusable-docker-ghcr.yml@v0.7.0 with: version_tag: ${{ needs.call-version-info-workflow.outputs.version_tag }} release_branch: main diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml index e6df68357..ba0111487 100644 --- a/.github/workflows/changelog.yml +++ b/.github/workflows/changelog.yml @@ -13,6 +13,6 @@ on: jobs: call-changelog-check-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-changelog-check.yml@v0.6.0 + uses: ASFHyP3/actions/.github/workflows/reusable-changelog-check.yml@v0.7.0 secrets: USER_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml index 9ab2093c9..c6bf089be 100644 --- a/.github/workflows/deploy-docs.yml +++ b/.github/workflows/deploy-docs.yml @@ -14,12 +14,10 @@ jobs: with: fetch-depth: 0 - - uses: conda-incubator/setup-miniconda@v2 + - uses: mamba-org/provision-with-micromamba@v14 with: - mamba-version: "*" - python-version: '3.10' - activate-environment: RAiDER - environment-file: environment.yml + extra-specs: | + python=3.10 - name: install RAiDER shell: bash -l {0} diff --git a/.github/workflows/labeled-pr.yml b/.github/workflows/labeled-pr.yml index 0471f4a6c..8601e2010 100644 --- a/.github/workflows/labeled-pr.yml +++ b/.github/workflows/labeled-pr.yml @@ -12,4 +12,4 @@ on: jobs: call-labeled-pr-check-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.6.0 + uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.7.0 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f45f9cc36..db757d9c6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,7 +7,7 @@ on: jobs: call-release-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.6.0 + uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.7.0 with: release_prefix: RAiDER develop_branch: dev diff --git a/.github/workflows/tag.yml b/.github/workflows/tag.yml index 9b097f8cf..8e73f7d06 100644 --- a/.github/workflows/tag.yml +++ b/.github/workflows/tag.yml @@ -7,7 +7,7 @@ on: jobs: call-bump-version-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.6.0 + uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.7.0 with: user: dbekaert email: bekaertdavid@gmail.com diff --git a/CHANGELOG.md b/CHANGELOG.md index f09016c2f..c2e621e41 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,21 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/) and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+
+## [0.4.1]
+
+### New/Updated Features
++ Reorder target points for intersection
++ Use exact coordinates of DEM to interpolate heights to target lat/lons
++ Support DEM interpolation to station file
++ Implement end-to-end test for intersection of cube with lat/lon files
++ Implement end-to-end test for delay calculation at stations
++ Update AOI to store the output directory so the DEM is written to the right place
++ `calcDelaysGUNW` will optionally download a GUNW product from AWS S3, process it, and upload a new version of the GUNW product in-place with the tropospheric correction layers added so that RAiDER can be used in ARIA GUNW production via HyP3. **Importantly**, tropospheric correction of GUNW products is still being actively developed; this workflow, as well as the correction itself, is subject to change.
+
+### Fixed
++ Package data is more explicitly handled so that it is included in the conda-forge build; see [#467](https://github.com/dbekaert/RAiDER/pull/467)
+
 ## [0.4.0]
 
 Adding of new GUNW support to RAiDER. This is an interface delivery allowing for subsequent integration into HYP3 (input/output parsing is not expected to change; computed data is not yet verified).
diff --git a/Dockerfile b/Dockerfile
index f87f40996..7b005d7c0 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -44,5 +44,5 @@ RUN echo ". /opt/conda/etc/profile.d/conda.sh" >> ~/.profile && \
 
 RUN python -m pip install --no-cache-dir /RAiDER/
 
-ENTRYPOINT ["/usr/bin/bash"]
-CMD ["-l"]
+ENTRYPOINT ["/RAiDER/tools/RAiDER/etc/entrypoint.sh"]
+CMD ["--help"]
diff --git a/README.md b/README.md
index 1a00261b8..abcecc3d5 100644
--- a/README.md
+++ b/README.md
@@ -59,7 +59,7 @@ or the current development version:
 docker pull ghcr.io/dbekaert/raider:test
 ```
 
-To run the container and jump into a bash shell inside:
+To run `raider.py` inside the container:
 ```
 docker run -it --rm ghcr.io/dbekaert/raider:latest
 ```
@@ -68,6 +68,11 @@ To mount your current directory inside the container so that files will be writt
 docker run -it -v ${PWD}:/home/raider/work --rm ghcr.io/dbekaert/raider:latest
 cd work
 ```
+To jump into a `bash` shell inside the container:
+```
+docker run -it --rm --entrypoint /bin/bash ghcr.io/dbekaert/raider:latest -l
+```
+
 For more docker run options, see: <https://docs.docker.com/engine/reference/run/>.
 
 
@@ -98,7 +103,7 @@ For development, we recommend installing directly from source.
 ```
 git clone https://github.com/dbekaert/RAiDER.git
 cd RAiDER
-conda create -f environment.yml
+conda env create -f environment.yml
 conda activate RAiDER
 python -m pip install -e .
 ```
diff --git a/docs/WeatherModels.md b/docs/WeatherModels.md
index ddbc87eea..58dad202c 100644
--- a/docs/WeatherModels.md
+++ b/docs/WeatherModels.md
@@ -18,7 +18,7 @@ ERA-5/ERA-I products require access to the ESA Copernicus servers. GMAO and MERR
 ------
 ## 2. NOAA weather models (HRRR)
 
-High-resolution rapid refresh (HRRR) weather model data products are generated by __[NOAA](https://rapidrefresh.noaa.gov/hrrr/)__ for the coninental US (CONUS) but not archived beyond three days. However, a public __[archive](home.chpc.utah.edu/~u0553130/Brian_Blaylock/hrrr_FAQ.html)__ is available at the University of Utah. This archive does not require a license agreement. This model has the highest spatial resolution available in RAiDER, with a horizontal grid spacing of about 3 km, and is provided in a Lambert conformal conic projection.
+High-resolution rapid refresh (HRRR) weather model data products are generated by __[NOAA](https://rapidrefresh.noaa.gov/hrrr/)__ for the continental US (CONUS) but not archived beyond three days. However, a public __[archive](https://home.chpc.utah.edu/~u0553130/Brian_Blaylock/hrrr_FAQ.html)__ is available at the University of Utah. This archive does not require a license agreement. This model has the highest spatial resolution available in RAiDER, with a horizontal grid spacing of about 3 km, and is provided in a Lambert conformal conic projection.
 
 ------
diff --git a/environment.yml b/environment.yml
index 303c0c1ac..8718752d7 100644
--- a/environment.yml
+++ b/environment.yml
@@ -11,6 +11,7 @@ dependencies:
   - python>=3.8
   - pip
   # For running
+  - boto3
   - cdsapi
   - cfgrib
   - cmake
diff --git a/pyproject.toml b/pyproject.toml
index 19b81705d..3a69e4d44 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -50,11 +50,15 @@ repository = "https://github.com/dbekaert/RAiDER"
 "generateGACOSVRT.py" = "RAiDER.models.generateGACOSVRT:main"
 
 [tool.setuptools]
+include-package-data = true
 zip-safe = false
 
 [tool.setuptools.packages.find]
 where = ["tools"]
 
+[tool.setuptools.package-data]
+"*" = ["*.yml", "*.yaml"]
+
 [tool.isort]
 known_first_party = "RAiDER"
 multi_line_output = 5
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 000000000..42b90946c
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,3 @@
+[pytest]
+markers =
+    long: mark a test as long
diff --git a/test/__init__.py b/test/__init__.py
index 1bc3f6e19..36bc6c74d 100644
--- a/test/__init__.py
+++ b/test/__init__.py
@@ -20,3 +20,4 @@ def pushd(dir):
 TEST_DIR = test_dir.absolute()
 DATA_DIR = os.path.join(TEST_DIR, "data")
 GEOM_DIR = os.path.join(TEST_DIR, 'test_geom')
+WM = 'GMAO'
diff --git a/test/_entrypoints.py b/test/_entrypoints.py
index cd1eb7f61..c7ee5d4b7 100644
--- a/test/_entrypoints.py
+++ b/test/_entrypoints.py
@@ -1,4 +1,5 @@
+
 def test_raider__main__2(script_runner):
     ret = script_runner.run('generateGACOSVRT.py')
     assert ret.success
diff --git a/test/cli/_argument_parsers.py b/test/cli/_argument_parsers.py
deleted file mode 100644
index 7082a0452..000000000
--- a/test/cli/_argument_parsers.py
+++ /dev/null
@@ -1,118 +0,0 @@
-from datetime import date, time
-
-import pytest
-
-import RAiDER.runProgram
-
-
-@pytest.fixture
-def delay_parser():
-    return RAiDER.runProgram.create_parser()
-
-
-@pytest.fixture
-def stats_parser():
-    pass
-
-
-@pytest.fixture
-def gnss_parser():
-    pass
-
-
-def test_delay_args(delay_parser):
-    args = delay_parser.parse_args([
-        '--date', '20200103',
-        '--time', '23:00:00',
-        '--latlon', 'latfile.dat', 'lonfile.dat',
-        '--model', 'ERA5',
-        '--zref', '20000',
-        '-v',
-        '--out', 'test/scenario_1/'
-    ])
-
-    assert args.dateList == [date(2020, 1, 3)]
-    assert args.time == time(23, 0, 0)
-    assert args.query_area == ['latfile.dat', 'lonfile.dat']
-    assert args.lineofsight is None
-    assert args.statevectors is None
-    assert args.dem is None
-    assert args.heightlvs is None
-    assert args.model == "ERA5"
-    assert args.files is None
-    assert args.wmLoc is None
-    assert args.zref == 20000.0
-    assert args.outformat is None
-    assert args.out == 'test/scenario_1/'
-    assert args.download_only is False
-    assert args.verbose == 1
-
-
-def test_delay_los_mutually_exclusive(delay_parser):
-    with pytest.raises(SystemExit):
-        delay_parser.parse_args([
-            '--date', '20200103',
-            '--time', '23:00:00',
-            '--lineofsight', 'losfile',
-            '--statevectors', 'statevectorfile'
-        ])
-
-
-def test_delay_aoi_mutually_exclusive(delay_parser):
-    with pytest.raises(SystemExit):
-        delay_parser.parse_args([
-            '--date', '20200103',
-            '--time', '23:00:00',
-            '--bbox', '10', '20', '30', '40',
-            '--latlon', 'lat', 'lon',
-            '--station_file', 'station_file'
-        ])
-
-    with pytest.raises(SystemExit):
-        delay_parser.parse_args([
-            '--date', '20200103',
-            '--time', '23:00:00',
-            '--bbox', '10', '20', '30', '40',
-            '--latlon', 'lat', 'lon',
-        ])
-
-    with pytest.raises(SystemExit):
-        delay_parser.parse_args([
-            '--date', '20200103',
-            '--time', '23:00:00',
-            '--bbox', '10', '20', '30', '40',
-            '--station_file', 'station_file'
-        ])
-
-    with pytest.raises(SystemExit):
-        delay_parser.parse_args([
-            '--date', '20200103',
-            '--time', '23:00:00',
-            '--latlon', 'lat', 'lon',
-            '--station_file', 'station_file'
-        ])
-
-    # AOI is required
-    with pytest.raises(SystemExit):
-        delay_parser.parse_args([
-            '--date', '20200103',
-            '--time', '23:00:00',
-        ])
-
-
-def test_delay_model(delay_parser):
-    with pytest.raises(SystemExit):
-        delay_parser.parse_args([
-            '--date', '20200103',
-            '--time', '23:00:00',
-            '--station_file', 'station_file',
-            '--model', 'FOOBAR'
-        ])
-
-    args = delay_parser.parse_args([
-        '--date', '20200103',
-        '--time', '23:00:00',
-        '--station_file', 'station_file',
-        '--model', 'era-5'
-    ])
-    assert args.model == "ERA5"
diff --git a/test/test_GUNW.py b/test/test_GUNW.py
index 825dd3f4a..e802eeceb 100644
--- a/test/test_GUNW.py
+++ b/test/test_GUNW.py
@@ -1,29 +1,30 @@
+import glob
 import os
 import shutil
-import numpy as np
-import pytest
-import shutil
 import subprocess
-import xarray as xr
+
+import numpy as np
 import rasterio as rio
+import xarray as xr
 
 from test import TEST_DIR
 
+WM = 'GMAO'
 
 def test_GUNW():
     ## eventually to be implemented
     # home = os.path.expanduser('~')
     # netrc = os.path.join(home, '.netrc')
-# 
+#
     # ## make netrc
     # if not os.path.exists(netrc):
     #     name, passw = os.getenv('URSname'), os.getenv('URSpass')
     #     cmd = f'echo "machine urs.earthdata.nasa.gov login {name} password {passw}" > ~/.netrc'
     #     subprocess.run(cmd.split())
-# 
+#
     #     cmd = f'chmod 600 {netrc}'
     #     subprocess.run(cmd.split())
-# 
+#
     SCENARIO_DIR = os.path.join(TEST_DIR, "GUNW")
     os.makedirs(SCENARIO_DIR, exist_ok=True)
     GUNW = 'S1-GUNW-D-R-071-tops-20200130_20200124-135156-34956N_32979N-PP-913f-v2_0_4.nc'
@@ -36,7 +37,7 @@ def test_GUNW():
     #     proc = subprocess.run(cmd.split(), stdout=subprocess.PIPE, universal_newlines=True)
     #     assert np.isclose(proc.returncode, 0)
 
-    cmd = f'raider.py ++process calcDelaysGUNW {updated_GUNW} -m GMAO -o {SCENARIO_DIR}'
+    cmd = f'raider.py ++process calcDelaysGUNW -f {updated_GUNW} -m {WM} -o {SCENARIO_DIR}'
     proc = subprocess.run(cmd.split(), stdout=subprocess.PIPE, universal_newlines=True)
     assert np.isclose(proc.returncode, 0)
 
@@ -56,9 +59,11 @@ def test_GUNW():
             crs = rio.crs.CRS.from_wkt(ds['crs'].crs_wkt)
             assert np.isclose(crs.to_epsg(), epsg), 'CRS incorrect'
 
-    
+    # Clean up files
     shutil.rmtree(SCENARIO_DIR)
     os.remove('GUNW_20200130-20200124.yaml')
-    return
+    for f in glob.glob(f'{WM}*'):
+        os.remove(f)
 
+    return
diff --git a/test/test_checkArgs.py b/test/test_checkArgs.py
index e0699e733..5e4604c98 100644
--- a/test/test_checkArgs.py
+++ b/test/test_checkArgs.py
@@ -1,5 +1,6 @@
 import datetime
 import os
+import shutil
 
 import pytest
 import multiprocessing as mp
@@ -14,11 +15,10 @@ from RAiDER.losreader import Zenith, Conventional, Raytracing
 from RAiDER.models.gmao import GMAO
 
-
 SCENARIO_1 = os.path.join(TEST_DIR, "scenario_1")
 SCENARIO_2 = os.path.join(TEST_DIR, "scenario_2")
 
-@pytest.fixture
+@pytest.fixture(autouse=True)
 def args():
     d = DEFAULT_DICT
     d['date_list'] = [datetime.datetime(2018, 1, 1)]
@@ -26,9 +26,11 @@ def args():
     d['aoi'] = BoundingBox([38, 39, -92, -91])
     d['los'] = Zenith()
     d['weather_model'] = GMAO()
-
-    return d
 
+    for f in 'weather_files weather_dir'.split():
+        if os.path.exists(f):
+            shutil.rmtree(f)
+    return d
 
 def isWriteable(dirpath):
     '''Test whether a directory is writeable'''
@@ -39,6 +41,7 @@ def isWriteable(dirpath):
     except IOError:
         return False
 
+
 def test_checkArgs_outfmt_1(args):
     '''Test that passing height levels with hdf5 outformat works'''
     args = args
@@ -68,7 +71,7 @@ def test_checkArgs_outfmt_4(args):
     '''Test that passing a raster format with height levels throws an error'''
     args = args
     args.aoi = RasterRDR(
-        lat_file = os.path.join(SCENARIO_1, 'geom', 'lat.dat'), 
+        lat_file = os.path.join(SCENARIO_1, 'geom', 'lat.dat'),
         lon_file = os.path.join(SCENARIO_1, 'geom', 'lon.dat'),
     )
     argDict = checkArgs(args)
@@ -145,6 +148,7 @@ def test_filenames_1(args):
 def test_filenames_2(args):
     '''tests that the correct filenames are generated'''
     args = args
+    args['output_directory'] = SCENARIO_2
     args.aoi = StationFile(os.path.join(SCENARIO_2, 'stations.csv'))
     argDict = checkArgs(args)
     assert '20180101' in argDict['wetFilenames'][0]
diff --git a/test/test_datelist.py b/test/test_datelist.py
index 2f46c9d26..9e3294a21 100644
--- a/test/test_datelist.py
+++ b/test/test_datelist.py
@@ -5,32 +5,32 @@ import shutil
 import yaml
 import numpy as np
 
-from test import TEST_DIR
+from test import TEST_DIR, WM
 
 
 def test_datelist():
     SCENARIO_DIR = os.path.join(TEST_DIR, 'datelist')
-    os.makedirs(SCENARIO_DIR, exist_ok=True)
+    os.makedirs(SCENARIO_DIR, exist_ok=False)
     dates = ['20200124', '20200130']
-    
+
     dct_group = {
         'aoi_group': {'bounding_box': [28, 39, -123, -112]},
         'date_group': {'date_list': dates},
         'time_group': {'time': '00:00:00'},
-        'weather_model': 'GMAO',
+        'weather_model': WM,
         'runtime_group': {
             'output_directory': SCENARIO_DIR,
             'weather_model_directory': os.path.join(SCENARIO_DIR, 'weather_files')
         }
     }
-    
+
     params = dct_group
     dst = os.path.join(SCENARIO_DIR, 'temp.yaml')
-    
+
     with open(dst, 'w') as fh:
         yaml.dump(params, fh, default_flow_style=False)
-    
+
     ## run raider on new file (two dates)
     cmd = f'raider.py {dst}'
     proc = subprocess.run(cmd.split(), stdout=subprocess.PIPE, universal_newlines=True)
@@ -43,34 +43,34 @@ def test_datelist():
 
     ## clean up
     shutil.rmtree(SCENARIO_DIR)
-    
+
     return dst
 
 
 def test_datestep():
     SCENARIO_DIR = os.path.join(TEST_DIR, 'datelist')
-    os.makedirs(SCENARIO_DIR, exist_ok=True)
+    os.makedirs(SCENARIO_DIR, exist_ok=False)
     st, en, step = '20200124', '20200130', 3
     n_dates = 3
-    
+
     dct_group = {
         'aoi_group': {'bounding_box': [28, 39, -123, -112]},
         'date_group': {'date_start': st, 'date_end': en, 'date_step': step},
         'time_group': {'time': '00:00:00'},
-        'weather_model': 'GMAO',
+        'weather_model': WM,
         'runtime_group': {
             'output_directory': SCENARIO_DIR,
             'weather_model_directory': os.path.join(SCENARIO_DIR, 'weather_files')
         }
    }
-    
+
    params = dct_group
    dst = 
os.path.join(SCENARIO_DIR, 'temp.yaml') - + with open(dst, 'w') as fh: yaml.dump(params, fh, default_flow_style=False) - + ## run raider on new file (two dates) cmd = f'raider.py {dst}' proc = subprocess.run(cmd.split(), stdout=subprocess.PIPE, universal_newlines=True) @@ -82,5 +82,3 @@ def test_datestep(): ## clean up shutil.rmtree(SCENARIO_DIR) - - return dst diff --git a/test/test_delayFcns.py b/test/test_delayFcns.py new file mode 100644 index 000000000..36d5bd36b --- /dev/null +++ b/test/test_delayFcns.py @@ -0,0 +1,30 @@ +import os +import pytest + +import numpy as np +import xarray as xr + + +from test import TEST_DIR + +from RAiDER.delayFcns import getInterpolators + +SCENARIO1_DIR = os.path.join(TEST_DIR, "scenario_1", "golden_data") + + +@pytest.fixture +def wmdata(): + return xr.load_dataset(os.path.join(SCENARIO1_DIR, 'HRRR_tropo_20200101T120000_ztd.nc')) + + +def test_getInterpolators(wmdata): + ds = wmdata + tw, th = getInterpolators(ds, kind='pointwise') + assert True # always pass unless an error is raised + +def test_getInterpolators_2(wmdata): + ds = wmdata + ds['hydro'][0,0,0] = np.nan + with pytest.raises(RuntimeError): + getInterpolators(ds, kind='pointwise') + diff --git a/test/weather_model/test_downloaders.py b/test/test_downloaders.py similarity index 100% rename from test/weather_model/test_downloaders.py rename to test/test_downloaders.py diff --git a/test/test_geom/test_era5.nc b/test/test_geom/test_era5.nc deleted file mode 100644 index 89d77b7a5..000000000 Binary files a/test/test_geom/test_era5.nc and /dev/null differ diff --git a/test/test_geom/test_era5t.nc b/test/test_geom/test_era5t.nc deleted file mode 100644 index 89d77b7a5..000000000 Binary files a/test/test_geom/test_era5t.nc and /dev/null differ diff --git a/test/test_geom/test_erai.nc b/test/test_geom/test_erai.nc deleted file mode 100644 index 2865fe82e..000000000 Binary files a/test/test_geom/test_erai.nc and /dev/null differ diff --git a/test/test_hdf5_parallel.py b/test/test_hdf5_parallel.py deleted file mode 100755 index 0cfde72ac..000000000 --- a/test/test_hdf5_parallel.py +++ /dev/null @@ -1,50 +0,0 @@ -import os -from test import TEST_DIR, pushd -import pytest - -import numpy as np - -from RAiDER.delayFcns import get_delays - -# FIXME: Relying on prior setup to be performed in order for test to pass. 
-# This file should either by committed as test data, or set up by a fixture
-# prior to the test running
-POINTS_FILE = os.path.join(
-    TEST_DIR,
-    "scenario_1",
-    "geom",
-    "query_points.h5"
-)
-MODEL_FILE = os.path.join(
-    TEST_DIR,
-    "scenario_1",
-    "weather_files",
-    "ERA5_2020-01-03T23_00_00_15.75N_18.25N_-103.24E_-99.75E.h5"
-)
-
-
-@pytest.mark.skipif(~os.path.exists(MODEL_FILE) or
-                    ~os.path.exists(POINTS_FILE),
-                    reason="Will not pass until the test_scenario_*'s have run")
-def test_get_delays_accuracy(tmp_path):
-    stepSize = 15.0
-    interpType = 'rgi'
-
-    with pushd(tmp_path):
-        delays_wet_1, delays_hydro_1 = get_delays(
-            stepSize,
-            POINTS_FILE,
-            MODEL_FILE,
-            interpType,
-            cpu_num=1
-        )
-
-        delays_wet_4, delays_hydro_4 = get_delays(
-            stepSize,
-            POINTS_FILE,
-            MODEL_FILE,
-            interpType,
-            cpu_num=4
-        )
-        assert np.allclose(delays_wet_1, delays_wet_4)
-        assert np.allclose(delays_hydro_1, delays_hydro_4)
diff --git a/test/test_interpolator.py b/test/test_interpolator.py
index c9ec478e5..3a1d24b00 100644
--- a/test/test_interpolator.py
+++ b/test/test_interpolator.py
@@ -1,4 +1,6 @@
+import os
 import numpy as np
+import rasterio as rio
 import pytest
 
 from scipy.interpolate import RegularGridInterpolator
@@ -925,13 +927,29 @@ def f(x, y, z):
     assert np.allclose(ans, ans_scipy, 1e-15, equal_nan=True)
     assert np.allclose(ans2, ans_scipy, 1e-15, equal_nan=True)
 
+
+def test_interpolateDEM():
+    s = 10
+    x = np.arange(s)
+    dem = np.outer(x, x).astype('float32')  # match the float32 dtype declared below
+    metadata = {'driver': 'GTiff', 'dtype': 'float32',
+                'width': s, 'height': s, 'count': 1}
+
+    demFile = './dem_tmp.tif'
+
+    with rio.open(demFile, 'w', **metadata) as ds:
+        ds.write(dem, 1)
+        ds.update_tags(AREA_OR_POINT='Point')
+
+    ## random points to interpolate to
+    lons = np.array([4.5, 9.5])
+    lats = np.array([2.5, 9.5])
+    out = interpolateDEM(demFile, (lats, lons))
+    gold = np.array([[36, 81], [8, 18]], dtype=float)
+    assert np.allclose(out, gold)
+    os.remove(demFile)
+
 # TODO: implement an interpolator test that is similar to test_scenario_1.
 # Currently the scipy and C++ interpolators differ on that case.
-def test_interpolateDEM():
-    x = np.arange(10)
-    dem = np.outer(x, x)
-    extent = [0, 9, 0, 9]
-    out = interpolateDEM(dem, extent, np.array([[4.5, 4.5], [0.5, 0.5], [10, 10]]))
-    assert np.allclose(out, np.array([20.25, 0.25, np.nan]), equal_nan=True)
diff --git a/test/test_intersect.py b/test/test_intersect.py
new file mode 100644
index 000000000..45cad43db
--- /dev/null
+++ b/test/test_intersect.py
@@ -0,0 +1,137 @@
+import glob
+import os
+import pytest
+import shutil
+import subprocess
+import yaml
+
+import numpy as np
+import pandas as pd
+import xarray as xr
+
+from test import TEST_DIR, WM
+
+import RAiDER
+
+wm = 'ERA-5' if WM == 'ERA5' else WM
+
+
+def makeLatLonGrid(bbox, reg, out_dir, spacing=0.1):
+    """ Make lat lons at a specified spacing """
+    S, N, W, E = bbox
+    lat_st, lat_en = S, N
+    lon_st, lon_en = W, E
+
+    lats = np.arange(lat_st, lat_en, spacing)
+    lons = np.arange(lon_st, lon_en, spacing)
+    Lat, Lon = np.meshgrid(lats, lons)
+    da_lat = xr.DataArray(Lat.T, name='data', coords={'lon': lons, 'lat': lats}, dims='lat lon'.split())
+    da_lon = xr.DataArray(Lon.T, name='data', coords={'lon': lons, 'lat': lats}, dims='lat lon'.split())
+
+    dst_lat = os.path.join(out_dir, f'lat_{reg}.nc')
+    dst_lon = os.path.join(out_dir, f'lon_{reg}.nc')
+    da_lat.to_netcdf(dst_lat)
+    da_lon.to_netcdf(dst_lon)
+
+    return dst_lat, dst_lon
+
+def update_yaml(dct_cfg:dict, dst:str='temp.yaml'):
+    """ Write a new yaml file from a dictionary.
+
+    Updates parameters in the default 'raider.yaml' file.
+    Each key:value pair in 'dct_cfg' will overwrite the corresponding entry in the default.
+    """
+
+    template_file = os.path.join(
+                    os.path.dirname(RAiDER.__file__), 'cli', 'raider.yaml')
+
+    with open(template_file, 'r') as f:
+        try:
+            params = yaml.safe_load(f)
+        except yaml.YAMLError as exc:
+            print(exc)
+            raise ValueError(f'Something is wrong with the yaml file {template_file}')
+
+    params = {**params, **dct_cfg}
+
+    with open(dst, 'w') as fh:
+        yaml.safe_dump(params, fh, default_flow_style=False)
+
+    return dst
+
+def test_cube_intersect():
+    """ Test the intersection of lat/lon files with the DEM (model height levels?) """
+    SCENARIO_DIR = os.path.join(TEST_DIR, "INTERSECT")
+    os.makedirs(SCENARIO_DIR, exist_ok=True)
+    ## make the lat lon grid
+    S, N, W, E = 34, 35, -117, -116
+    date = 20200130
+    time = '12:00:00'
+    f_lat, f_lon = makeLatLonGrid([S, N, W, E], 'LA', SCENARIO_DIR, 0.1)
+
+    ## make the template file
+    grp = {
+        'date_group': {'date_start': date},
+        'time_group': {'time': time},
+        'weather_model': WM,
+        'aoi_group': {'lat_file': f_lat, 'lon_file': f_lon},
+        'runtime_group': {'output_directory': SCENARIO_DIR},
+    }
+
+    ## generate the default template file and overwrite it with new parms
+    cfg = update_yaml(grp)
+
+    ## run raider and intersect
+    cmd = f'raider.py {cfg}'
+    proc = subprocess.run(cmd.split(), stdout=subprocess.PIPE, universal_newlines=True)
+    assert np.isclose(proc.returncode, 0)
+
+    ## hard code what it should be and check it matches
+    wm_file = os.path.join(SCENARIO_DIR, f'{WM}_hydro_{date}T{time.replace(":", "")}_ztd.tiff')
+    da = xr.open_dataset(wm_file)['band_data']
+    gold = {'GMAO': 2.045914, 'ERA5': 2.061974, 'HRRR': 3.0972726}
+    assert np.isclose(da.mean().round(6), gold[WM])
+
+    # Clean up files
+    shutil.rmtree(SCENARIO_DIR)
+    for f in glob.glob(f'{wm}*'):
+        os.remove(f)
+    os.remove('temp.yaml')
+
+    return
+
+def test_gnss_intersect():
+    SCENARIO_DIR = os.path.join(TEST_DIR, "INTERSECT")
+    os.makedirs(SCENARIO_DIR, exist_ok=True)
+    gnss_file = os.path.join(TEST_DIR, 'scenario_2', 'stations.csv')
+    date = 20200130
+    time = '12:00:00'
+
+    ## make the template file
+    grp = {
+        'date_group': {'date_start': date},
+        'time_group': {'time': time},
+        'weather_model': WM,
+        'aoi_group': {'station_file': gnss_file},
+        'runtime_group': {'output_directory': SCENARIO_DIR},
+    }
+
+    ## generate the default template file and overwrite it with new parms
+    cfg = update_yaml(grp)
+
+    ## run raider and intersect
+    cmd = f'raider.py {cfg}'
+    proc = subprocess.run(cmd.split(), stdout=subprocess.PIPE, universal_newlines=True)
+    assert np.isclose(proc.returncode, 0)
+
+    gold = {'GMAO': 2.365131, 'ERA5': 2.395992, 'HRRR': 3.435141}
+    df = pd.read_csv(os.path.join(SCENARIO_DIR, f'{WM}_Delay_{date}T{time.replace(":", "")}.csv'))
+    td = df['totalDelay'].mean().round(6)
+    assert np.allclose(gold[WM], td)
+
+    shutil.rmtree(SCENARIO_DIR)
+    for f in glob.glob(f'{wm}*'):
+        os.remove(f)
+    os.remove('temp.yaml')
+
+    return
diff --git a/test/test_llreader.py b/test/test_llreader.py
index 03ed09118..564829ae9 100644
--- a/test/test_llreader.py
+++ b/test/test_llreader.py
@@ -10,8 +10,8 @@ from RAiDER.utilFcns import rio_open
 
 from RAiDER.llreader import (
-    StationFile, RasterRDR, BoundingBox, GeocodedFile, Geocube,
-    bounds_from_latlon_rasters, bounds_from_csv, get_bbox
+    StationFile, RasterRDR, BoundingBox, GeocodedFile, Geocube,
+    bounds_from_latlon_rasters, bounds_from_csv
 )
 
 SCENARIO2_DIR = os.path.join(TEST_DIR, "scenario_2")
@@ -75,7 +75,7 @@ 
def test_read_station_file(station_file): assert np.allclose(lats, stats['Lat'].values) assert np.allclose(lons, stats['Lon'].values) - + assert query.projection() == 'EPSG:4326' # Hard code the lat/lon bounds to test against changing the files diff --git a/test/weather_model/test_processWM.py b/test/test_processWM.py similarity index 100% rename from test/weather_model/test_processWM.py rename to test/test_processWM.py diff --git a/test/cli/test_raiderDelay.py b/test/test_raiderDelay.py similarity index 100% rename from test/cli/test_raiderDelay.py rename to test/test_raiderDelay.py diff --git a/test/test_scenario_2.py b/test/test_scenario_2.py new file mode 100644 index 000000000..9d366c919 --- /dev/null +++ b/test/test_scenario_2.py @@ -0,0 +1,19 @@ +import os +import pytest +import subprocess + +from test import TEST_DIR + +import numpy as np +import xarray as xr + +#TODO: include GNSS station test +# @pytest.mark.long +# def test_scenario_2(): +# test_path = os.path.join(TEST_DIR, "scenario_2", 'raider_example_2.yaml') +# process = subprocess.run(['raider.py', test_path],stdout=subprocess.PIPE, universal_newlines=True) +# assert process.returncode == 0 + +# # Clean up files +# subprocess.run(['rm', '-f', './HRRR*']) +# subprocess.run(['rm', '-rf', './weather_files']) \ No newline at end of file diff --git a/test/test_util.py b/test/test_util.py index 4314a0999..176c654eb 100644 --- a/test/test_util.py +++ b/test/test_util.py @@ -11,7 +11,7 @@ from RAiDER.utilFcns import ( _least_nonzero, cosd, rio_open, sind, - writeArrayToRaster, writeResultsToHDF5, rio_profile, + writeArrayToRaster, rio_profile, rio_extents, getTimeFromFile, enu2ecef, ecef2enu, transform_bbox, clip_bbox ) @@ -124,22 +124,6 @@ def test_rio_open(): assert np.allclose(out.shape, (45, 226)) -def test_writeResultsToHDF5(tmp_path): - lats = np.array([15.0, 15.5, 16.0, 16.5, 17.5, -40, 60, 90]) - lons = np.array([-100.0, -100.4, -91.2, 45.0, 0., -100, -100, -100]) - hgts = np.array([0., 1000., 10000., 0., 0., 0., 0., 0.]) - wet = np.zeros(lats.shape) - hydro = np.ones(lats.shape) - filename = str(tmp_path / 'dummy.hdf5') - - writeResultsToHDF5(lats, lons, hgts, wet, hydro, filename) - - with h5py.File(filename, 'r') as f: - assert np.allclose(np.array(f['lat']), lats) - assert np.allclose(np.array(f['hydroDelay']), hydro) - assert f.attrs['DelayType'] == 'Zenith' - - def test_writeArrayToRaster(tmp_path): array = np.transpose( np.array([np.arange(0, 10)]) @@ -242,6 +226,7 @@ def test_rio_extent(): dst.write(np.random.randn(11, 11), 1) profile = rio_profile("test.tif") assert rio_extents(profile) == (17.0, 18.0, 17.0, 18.0) + os.remove("test.tif") def test_rio_extent2(): @@ -457,18 +442,23 @@ def test_transform_bbox_1(): wesn = [-77.0, -76.0, 34.0, 35.0] snwe = wesn[2:] + wesn[:2] - assert transform_bbox(wesn, src_crs=4326, dest_crs=4326) == snwe + assert transform_bbox(snwe, src_crs=4326, dest_crs=4326) == snwe def test_transform_bbox_2(): - wesn = [-77.0, -76.0, 34.0, 35.0] + snwe_in = [34.0, 35.0, -77.0, -76.0] expected_snwe = [3762606.6598762725, 3874870.6347308, 315290.16886786406, 408746.7471660769] - snwe = transform_bbox(wesn, src_crs=4326, dest_crs=32618, margin=0.) + snwe = transform_bbox(snwe_in, src_crs=4326, dest_crs=32618, margin=0.) 
assert np.allclose(snwe, expected_snwe) +def test_clip_bbox(): + wesn = [-77.0, -76.0, 34.0, 35.0] + snwe = [34.0, 35.01, -77.0, -76.0] + snwe_in = [34.005, 35.0006, -76.999, -76.0] assert clip_bbox(wesn, 0.01) == wesn + assert clip_bbox(snwe_in, 0.01) == snwe diff --git a/test/cli/test_validators.py b/test/test_validators.py similarity index 84% rename from test/cli/test_validators.py rename to test/test_validators.py index 972c47ff8..9e11dd657 100644 --- a/test/cli/test_validators.py +++ b/test/test_validators.py @@ -1,14 +1,21 @@ -from argparse import ArgumentParser, ArgumentTypeError +from argparse import ArgumentParser from datetime import datetime, time +import os import pytest import numpy as np +from test import TEST_DIR, pushd +SCENARIO = os.path.join(TEST_DIR, "scenario_4") + +from RAiDER.cli import AttributeDict + + from RAiDER.cli.validators import ( modelName2Module, getBufferedExtent, isOutside, isInside, enforce_valid_dates as date_type, convert_time as time_type, - enforce_bbox, parse_dates + enforce_bbox, parse_dates, enforce_wm, get_los ) @@ -17,7 +24,6 @@ def parser(): return ArgumentParser() - @pytest.fixture def llsimple(): lats = (10, 12) @@ -46,6 +52,25 @@ def llarray(): return lats, lons +@pytest.fixture +def args1(): + test_file = os.path.join(SCENARIO, 'los.rdr') + args = AttributeDict({'los_file': test_file, 'los_convention': 'isce','ray_trace': False}) + return args + + + +def test_enforce_wm(): + with pytest.raises(NotImplementedError): + enforce_wm('notamodel') + + +def test_get_los_ray(args1): + args = args1 + los = get_los(args) + assert not los.ray_trace() + assert los.is_Projected() + def test_date_type(): assert date_type("2020-10-1") == datetime(2020, 10, 1) diff --git a/test/weather_model/test_weather_model.py b/test/test_weather_model.py similarity index 100% rename from test/weather_model/test_weather_model.py rename to test/test_weather_model.py diff --git a/tools/RAiDER/aria/prepFromGUNW.py b/tools/RAiDER/aria/prepFromGUNW.py index e7ec966ab..345c40705 100644 --- a/tools/RAiDER/aria/prepFromGUNW.py +++ b/tools/RAiDER/aria/prepFromGUNW.py @@ -231,11 +231,11 @@ def update_yaml(dct_cfg:dict, dst:str='GUNW.yaml'): def main(args): """ Read parameters needed for RAiDER from ARIA Standard Products (GUNW) """ - GUNWObj = GUNW(args.file, args.model, args.output_directory) + GUNWObj = GUNW(args.file, args.weather_model, args.output_directory) GUNWObj() raider_cfg = { - 'weather_model': args.model, + 'weather_model': args.weather_model, 'look_dir': GUNWObj.look_dir, 'cube_spacing_in_m': GUNWObj.spacing_m, 'aoi_group' : {'bounding_box': GUNWObj.SNWE}, diff --git a/tools/RAiDER/aws.py b/tools/RAiDER/aws.py new file mode 100644 index 000000000..6e08a13f1 --- /dev/null +++ b/tools/RAiDER/aws.py @@ -0,0 +1,53 @@ +from mimetypes import guess_type +from pathlib import Path +from typing import Union + +import boto3 + +from RAiDER.logger import logger + +S3_CLIENT = boto3.client('s3') + + +def get_tag_set() -> dict: + tag_set = { + 'TagSet': [ + { + 'Key': 'file_type', + 'Value': 'product' + } + ] + } + return tag_set + + +def get_content_type(file_location: Union[Path, str]) -> str: + content_type = guess_type(file_location)[0] + if not content_type: + content_type = 'application/octet-stream' + return content_type + + +def upload_file_to_s3(path_to_file: Union[str, Path], bucket: str, prefix: str = ''): + path_to_file = Path(path_to_file) + key = str(Path(prefix) / path_to_file) + extra_args = {'ContentType': get_content_type(key)} + + logger.info(f'Uploading 
s3://{bucket}/{key}') + S3_CLIENT.upload_file(str(path_to_file), bucket, key, extra_args) + + tag_set = get_tag_set() + + S3_CLIENT.put_object_tagging(Bucket=bucket, Key=key, Tagging=tag_set) + + +def get_s3_file(bucket_name, bucket_prefix, file_type: str): + result = S3_CLIENT.list_objects_v2(Bucket=bucket_name, Prefix=bucket_prefix) + for s3_object in result['Contents']: + key = s3_object['Key'] + if key.endswith(file_type): + file_name = Path(key).name + logger.info(f'Downloading s3://{bucket_name}/{key} to {file_name}') + S3_CLIENT.download_file(bucket_name, key, file_name) + return file_name + diff --git a/tools/RAiDER/checkArgs.py b/tools/RAiDER/checkArgs.py index 629b7f7a0..bae732784 100644 --- a/tools/RAiDER/checkArgs.py +++ b/tools/RAiDER/checkArgs.py @@ -28,7 +28,7 @@ def checkArgs(args): # Directories if args.weather_model_directory is None: args.weather_model_directory = os.path.join(args.output_directory, 'weather_files') - + if not os.path.exists(args.weather_model_directory): os.mkdir(args.weather_model_directory) @@ -48,7 +48,7 @@ def checkArgs(args): args.output_directory, '{}_Delay_{}.csv' .format( - args.weather_model.Model(), + args.weather_model._dataset.upper(), d.strftime('%Y%m%dT%H%M%S'), ) ) @@ -85,7 +85,7 @@ def checkArgs(args): args.weather_model._dataset.upper(), args.output_directory, ) - + wetNames.append(wetFilename) hydroNames.append(hydroFilename) diff --git a/tools/RAiDER/cli/__init__.py b/tools/RAiDER/cli/__init__.py index c0e0d1aef..8a0715ae1 100644 --- a/tools/RAiDER/cli/__init__.py +++ b/tools/RAiDER/cli/__init__.py @@ -36,7 +36,7 @@ class AttributeDict(dict): raster_format='GTiff', file_format='GTiff', download_only=False, - output_directory=os.getcwd(), + output_directory='.', weather_model_directory=None, output_projection='EPSG:4326', ) diff --git a/tools/RAiDER/cli/raider.py b/tools/RAiDER/cli/raider.py index 806a79d2b..d965af978 100644 --- a/tools/RAiDER/cli/raider.py +++ b/tools/RAiDER/cli/raider.py @@ -6,15 +6,11 @@ from textwrap import dedent -import RAiDER -from RAiDER.constants import _ZREF, _CUBE_SPACING_IN_M +from RAiDER import aws from RAiDER.logger import logger, logging from RAiDER.cli import DEFAULT_DICT, AttributeDict from RAiDER.cli.parser import add_out, add_cpus, add_verbose -from RAiDER.cli.validators import ( - enforce_time, enforce_bbox, parse_dates, get_query_region, get_heights, get_los, enforce_wm, - DateListAction, date_type, -) +from RAiDER.cli.validators import DateListAction, date_type HELP_MESSAGE = """ @@ -47,8 +43,9 @@ def read_template_file(fname): >>> template = read_template_file('raider.yaml') """ - from RAiDER.cli.validators import (enforce_time, enforce_bbox, parse_dates, - get_query_region, get_heights, get_los, enforce_wm) + from RAiDER.cli.validators import ( + enforce_time, parse_dates, get_query_region, get_heights, get_los, enforce_wm + ) with open(fname, 'r') as f: try: params = yaml.safe_load(f) @@ -164,7 +161,7 @@ def calcDelays(iargs=None): dst = os.path.join(os.getcwd(), 'raider.yaml') shutil.copyfile(template_file, dst) logger.info('Wrote %s', dst) - os.sys.exit() + sys.exit() # check: existence of input template files @@ -377,44 +374,70 @@ def downloadGNSS(): ## ------------------------------------------------------------ prepFromGUNW.py -def calcDelaysGUNW(iargs=None): +def calcDelaysGUNW(): from RAiDER.aria.prepFromGUNW import main as GUNW_prep from RAiDER.aria.calcGUNW import tropo_gunw_inf as GUNW_calc p = argparse.ArgumentParser( - description='Calculate a cube of interferometic delays for GUNW 
files')
+        description='Calculate a cube of interferometric delays for GUNW files',
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+
+    p.add_argument(
+        '--bucket',
+        help='S3 bucket containing ARIA GUNW NetCDF file. Will be ignored if the --file argument is provided.'
+    )
+
+    p.add_argument(
+        '--bucket-prefix', default='',
+        help='S3 bucket prefix containing ARIA GUNW NetCDF file. Will be ignored if the --file argument is provided.'
+    )
 
     p.add_argument(
-        'file', type=str,
+        '-f', '--file', type=str,
         help='1 ARIA GUNW netcdf file'
-    )
+        )
 
     p.add_argument(
-        '-m', '--model', default='HRRR', type=str,
-        help='Weather model (Default=HRRR).'
-    )
+        '-m', '--weather-model', default='HRRR', type=str,
+        choices=['None', 'HRRR', 'HRES', 'GMAO'], help='Weather model.'
+        )
 
     p.add_argument(
-        '-o', '--output_directory', default=os.getcwd(), type=str,
-        help='Directory to store results (Default=./).'
-    )
+        '-o', '--output-directory', default=os.getcwd(), type=str,
+        help='Directory to store results.'
+        )
 
     p.add_argument(
-        '-u', '--update_GUNW', default=True,
-        help='Optionally update the GUNW by writing the delays into the troposphere group (Default=True).'
-    )
+        '-u', '--update-GUNW', default=True,
+        help='Optionally update the GUNW by writing the delays into the troposphere group.'
+        )
+
+    args = p.parse_args()
+    if args.weather_model == 'None':
+        # NOTE: HyP3's current step function implementation does not have a good way of conditionally
+        # running processing steps. This allows HyP3 to always run this step but exit immediately
+        # and do nothing if tropospheric correction via RAiDER is not selected. This should not cause
+        # any appreciable cost increase to GUNW product generation.
+        print('Nothing to do!')
+        return
 
-    args = p.parse_args(args=iargs)
-    args.argv = iargs if iargs else os.sys.argv[1:]
     # args.files = glob.glob(args.files)  # eventually support multiple files
 
+    if not args.file and args.bucket:
+        args.file = aws.get_s3_file(args.bucket, args.bucket_prefix, '.nc')
+    elif not args.file:
+        raise ValueError('Either argument --file or --bucket must be provided')
 
     # prep the config needed for delay calcs
-    path_cfg, wavelength = GUNW_prep(args)
+    path_cfg, wavelength = GUNW_prep(args)
 
-    ## write delay cube (nc) to disk using config
-    ## return a list with the path to cube for each date
+    # write delay cube (nc) to disk using config
+    # return a list with the path to cube for each date
     cube_filenames = calcDelays([path_cfg])
 
-    ## calculate the interferometric phase and write it out
+    # calculate the interferometric phase and write it out
     GUNW_calc(cube_filenames, args.file, wavelength, args.output_directory, args.update_GUNW)
+
+    # upload to s3
+    if args.bucket:
+        aws.upload_file_to_s3(args.file, args.bucket, args.bucket_prefix)
diff --git a/tools/RAiDER/cli/validators.py b/tools/RAiDER/cli/validators.py
index bf7ec2e11..88c19a5c6 100755
--- a/tools/RAiDER/cli/validators.py
+++ b/tools/RAiDER/cli/validators.py
@@ -61,11 +61,10 @@ def get_heights(args, out, station_file, bounding_box=None):
     '''
     Parse the Height info and download a DEM if needed
     '''
-    dem_path = os.path.join(out, 'geom')
-    if not os.path.exists(dem_path):
-        os.mkdir(dem_path)
+    dem_path = out
+
     out = {
-        'dem': None,
+        'dem': args.get('dem'),
         'height_file_rdr': None,
         'height_levels': None,
    }
@@ -75,7 +74,8 @@ def get_heights(args, out, station_file, bounding_box=None):
         if 'Hgt_m' not in pd.read_csv(station_file):
             out['dem'] = os.path.join(dem_path, 'GLO30.dem')
     elif os.path.exists(args.dem):
-        out['dem'] = args['dem']
+        out['dem'] = args.dem
+
         # crop the DEM
         if bounding_box is not None:
             dem_bounds = rio_extents(rio_profile(args.dem))
             lats = dem_bounds[:2]
             lons = dem_bounds[2:]
@@ -99,17 +99,21 @@ def get_heights(args, out, station_file, bounding_box=None):
     elif args.get('height_file_rdr'):
         out['height_file_rdr'] = args.height_file_rdr
 
-    elif args.get('height_levels'):
+    else:
+        # download the DEM if needed
+        out['dem'] = os.path.join(dem_path, 'GLO30.dem')
+
+    if args.get('height_levels'):
         if isinstance(args.height_levels, str):
             l = re.findall('[-0-9]+', args.height_levels)
         else:
             l = args.height_levels
 
-        out['height_levels'] = [float(ll) for ll in l]
-
-    else:
-        # download the DEM if needed
-        out['dem'] = os.path.join(dem_path, 'GLO30.dem')
+        out['height_levels'] = np.array([float(ll) for ll in l])
+        if np.any(out['height_levels'] < 0):
+            logger.warning('Weather model only extends to the surface topography; '
+                           'height levels below the topography will be interpolated from the surface '
+                           'and may be inaccurate.')
 
     return out
 
@@ -148,7 +152,7 @@ def get_query_region(args):
         query = GeocodedFile(args.geocoded_file, is_dem=is_dem)
 
     ## untested
-    elif arg.get('geo_cube'):
+    elif args.get('geo_cube'):
         query = Geocube(args.geo_cube)
 
     else:
diff --git a/tools/RAiDER/delay.py b/tools/RAiDER/delay.py
index da6bc75c6..929cac0e4 100755
--- a/tools/RAiDER/delay.py
+++ b/tools/RAiDER/delay.py
@@ -25,12 +25,13 @@ import numpy as np
 
 from RAiDER.delayFcns import (
-    getInterpolators
+    getInterpolators, get_output_spacing,
 )
 from RAiDER.logger import logger, logging
 from RAiDER.losreader import get_sv, getTopOfAtmosphere
 from RAiDER.utilFcns import (
-    lla2ecef, transform_bbox, clip_bbox, rio_profile,
+    lla2ecef, transform_bbox, clip_bbox, writeResultsToXarray,
+    rio_profile,
 )
 
 
@@ -46,7 +47,10 @@ def tropo_delay(
     look_dir: str='right',
 ):
     """
-    Calculate integrated delays on query points.
+    Calculate integrated delays on query points. Options are:
+    1. Zenith delays (ZTD)
+    2. Zenith delays projected to the line-of-sight (STD-projected)
+    3. Slant delays integrated along the raypath (STD-raytracing)
 
     Args:
         dt: Datetime - Datetime object for determining when to calculate delays
@@ -61,6 +65,24 @@ def tropo_delay(
 
     Returns:
         xarray Dataset *or* ndarrays: - wet and hydrostatic delays at the grid nodes / query points.
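+
+    Example (a sketch; the weather model file name here is hypothetical):
+        >>> from datetime import datetime
+        >>> from RAiDER.llreader import BoundingBox
+        >>> from RAiDER.losreader import Zenith
+        >>> aoi = BoundingBox([34, 35, -117, -116])  # S N W E
+        >>> ds, _ = tropo_delay(datetime(2020, 1, 30, 12), 'GMAO_2020_01_30_T12_00_00.nc',
+        ...                     aoi, Zenith(), height_levels=[0, 100, 500, 1000])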
""" + crs = CRS(out_proj) + + # Load CRS from weather model file + wm_proj = rio_profile(f"netcdf:{weather_model_file}:t")["crs"] + if wm_proj is None: + logger.warning("WARNING: I can't find a CRS in the weather model file, so I will assume you are using WGS84") + wm_proj = CRS.from_epsg(4326) + else: + wm_proj = CRS.from_wkt(wm_proj.to_wkt()) + # get heights if height_levels is None: if aoi.type() == 'Geocube': @@ -71,8 +85,8 @@ def tropo_delay( height_levels = ds.z.values #TODO: expose this as library function - ds = _get_delays_on_cube(dt, weather_model_file, aoi.bounds(), height_levels, - los, out_proj=out_proj, cube_spacing_m=cube_spacing_m, look_dir=look_dir) + ds = _get_delays_on_cube(dt, weather_model_file, wm_proj, aoi.bounds(), height_levels, + los, crs=crs, cube_spacing_m=cube_spacing_m, look_dir=look_dir) if (aoi.type() == 'bounding_box') or (aoi.type() == 'Geocube'): return ds, None @@ -89,10 +103,13 @@ def tropo_delay( hgts = aoi.readZ() pnts = transformPoints(lats, lons, hgts, pnt_proj, out_proj) if pnts.ndim == 3: - pnts = pnts.transpose(1,2,0) + pnts = pnts.transpose(2,1,0) elif pnts.ndim == 2: pnts = pnts.T - ifWet, ifHydro = getInterpolators(ds, 'ztd') # the cube from get_delays_on_cube calls the total delays 'wet' and 'hydro' + try: + ifWet, ifHydro = getInterpolators(ds, "ztd") + except RuntimeError: + logger.exception('Weather model {} failed, may contain NaNs'.format(weather_model_file)) wetDelay = ifWet(pnts) hydroDelay = ifHydro(pnts) @@ -106,43 +123,16 @@ def tropo_delay( return wetDelay, hydroDelay -def _get_delays_on_cube(dt, weather_model_file, ll_bounds, heights, los, out_proj=4326, cube_spacing_m=None, look_dir='right', nproc=1): +def _get_delays_on_cube(dt, weather_model_file, wm_proj, ll_bounds, heights, los, crs, cube_spacing_m=None, look_dir='right', nproc=1): """ raider cube generation function. 
""" - # For testing multiprocessing - # TODO - move this to configuration - crs = CRS(out_proj) - - # Load CRS from weather model file - wm_proj = rio_profile(f"netcdf:{weather_model_file}:t")["crs"] - if wm_proj is None: - print("WARNING: I can't find a CRS in the weather model file, so I will assume you are using WGS84") - wm_proj = CRS.from_epsg(4326) - else: - wm_proj = CRS.from_wkt(wm_proj.to_wkt()) - - # Determine the output grid extent here - wesn = ll_bounds[2:] + ll_bounds[:2] - out_snwe = transform_bbox( - wesn, src_crs=4326, dest_crs=crs - ) - - # Clip output grid to multiples of spacing - if cube_spacing_m is None: - with xarray.load_dataset(weather_model_file) as ds: - xpts = ds.x.values - ypts = ds.y.values - cube_spacing_m = np.nanmean([np.nanmean(np.diff(xpts)), np.nanmean(np.diff(ypts))]) - if wm_proj.axis_info[0].unit_name == "degree": - cube_spacing_m = cube_spacing_m * 1.0e5 # Scale by 100km - - if crs.axis_info[0].unit_name == "degree": - out_spacing = cube_spacing_m / 1.0e5 # Scale by 100km - else: - out_spacing = cube_spacing_m - out_snwe = clip_bbox(out_snwe, out_spacing) + # Determine the output grid extent here and clip output grid to multiples of spacing + snwe = transform_bbox(ll_bounds, src_crs=4326, dest_crs=crs) + out_spacing = get_output_spacing(cube_spacing_m, weather_model_file, wm_proj, crs) + out_snwe = clip_bbox(snwe, out_spacing) + logger.debug(f"Output SNWE: {out_snwe}") logger.debug(f"Output cube spacing: {out_spacing}") @@ -151,14 +141,17 @@ def _get_delays_on_cube(dt, weather_model_file, ll_bounds, heights, los, out_pro xpts = np.arange(out_snwe[2], out_snwe[3] + out_spacing, out_spacing) ypts = np.arange(out_snwe[1], out_snwe[0] - out_spacing, -out_spacing) - # If no orbit is provided # Build zenith delay cube if los.is_Zenith() or los.is_Projected(): out_type = ["zenith" if los.is_Zenith() else 'slant - projected'][0] # Get ZTD interpolators - ifWet, ifHydro = getInterpolators(weather_model_file, "total") + try: + ifWet, ifHydro = getInterpolators(weather_model_file, "total") + except RuntimeError: + logger.exception('Weather model {} failed, may contain NaNs'.format(weather_model_file)) + # Build cube wetDelay, hydroDelay = _build_cube( @@ -170,11 +163,14 @@ def _get_delays_on_cube(dt, weather_model_file, ll_bounds, heights, los, out_pro out_type = "slant - raytracing" # Get pointwise interpolators - ifWet, ifHydro = getInterpolators( - weather_model_file, - kind="pointwise", - shared=(nproc > 1), - ) + try: + ifWet, ifHydro = getInterpolators( + weather_model_file, + kind="pointwise", + shared=(nproc > 1), + ) + except RuntimeError: + logger.exception('Weather model {} failed, may contain NaNs'.format(weather_model_file)) # Build cube if nproc == 1: @@ -194,70 +190,7 @@ def _get_delays_on_cube(dt, weather_model_file, ll_bounds, heights, los, out_pro raise NotImplementedError # Write output file - # Modify this as needed for NISAR / other projects - ds = xarray.Dataset( - data_vars=dict( - wet=(["z", "y", "x"], - wetDelay, - {"units" : "m", - "description": f"wet {out_type} delay", - # 'crs': crs.to_epsg(), - "grid_mapping": "crs", - - }), - hydro=(["z", "y", "x"], - hydroDelay, - {"units": "m", - # 'crs': crs.to_epsg(), - "description": f"hydrostatic {out_type} delay", - "grid_mapping": "crs", - }), - ), - coords=dict( - x=(["x"], xpts), - y=(["y"], ypts), - z=(["z"], zpts), - ), - attrs=dict( - Conventions="CF-1.7", - title="RAiDER geo cube", - source=os.path.basename(weather_model_file), - history=str(datetime.datetime.utcnow()) + " RAiDER", 
- description=f"RAiDER geo cube - {out_type}", - reference_time=str(dt), - ), - ) - - # Write projection system mapping - ds["crs"] = int(-2147483647) # dummy placeholder, match GUNW - for k, v in crs.to_cf().items(): - ds.crs.attrs[k] = v - - # Write z-axis information - ds.z.attrs["axis"] = "Z" - ds.z.attrs["units"] = "m" - ds.z.attrs["description"] = "height above ellipsoid" - - # If in degrees - if crs.axis_info[0].unit_name == "degree": - ds.y.attrs["units"] = "degrees_north" - ds.y.attrs["standard_name"] = "latitude" - ds.y.attrs["long_name"] = "latitude" - - ds.x.attrs["units"] = "degrees_east" - ds.x.attrs["standard_name"] = "longitude" - ds.x.attrs["long_name"] = "longitude" - - else: - ds.y.attrs["axis"] = "Y" - ds.y.attrs["standard_name"] = "projection_y_coordinate" - ds.y.attrs["long_name"] = "y-coordinate in projected coordinate system" - ds.y.attrs["units"] = "m" - - ds.x.attrs["axis"] = "X" - ds.x.attrs["standard_name"] = "projection_x_coordinate" - ds.x.attrs["long_name"] = "x-coordinate in projected coordinate system" - ds.x.attrs["units"] = "m" + ds = writeResultsToXarray(dt, xpts, ypts, zpts, crs, wetDelay, hydroDelay, weather_model_file, out_type) return ds diff --git a/tools/RAiDER/delayFcns.py b/tools/RAiDER/delayFcns.py index 60e5f1e88..fc251db01 100755 --- a/tools/RAiDER/delayFcns.py +++ b/tools/RAiDER/delayFcns.py @@ -5,95 +5,22 @@ # RESERVED. United States Government Sponsorship acknowledged. # # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -import itertools import multiprocessing as mp import xarray -from netCDF4 import Dataset import numpy as np -from pyproj import CRS, Transformer from scipy.interpolate import RegularGridInterpolator as Interpolator -from RAiDER.constants import _STEP -from RAiDER.makePoints import makePoints1D - - -def calculate_start_points(x, y, z, ds): - ''' - Args: - ---------- - wm_file: str - A file containing a regularized weather model. - Returns: - ------- - SP: ndarray - a * x 3 array containing the XYZ locations of the pixels in ECEF coordinates. - Note the ordering of the array is [Y X Z] - ''' - [X, Y, Z] = np.meshgrid(x, y, z) - - try: - t = Transformer.from_crs(ds['CRS'], 4978, always_xy=True) # converts to WGS84 geocentric - except: - print("I can't find a CRS in the weather model file, so I will assume you are using WGS84") - t = Transformer.from_crs(4326, 4978, always_xy=True) # converts to WGS84 geocentric - - return np.moveaxis(np.array(t.transform(X, Y, Z)), 0, -1), np.stack([X, Y, Z], axis=-1) - - -def get_delays( - stepSize, - SP, - LOS, - wm_file, - cpu_num=0 -): - ''' - Create the integration points for each ray path. 
- ''' - ifWet, ifHydro = getInterpolators(wm_file) - - with xarray.load_dataset(wm_file) as f: - try: - wm_proj = f.attrs['CRS'] - except: - wm_proj = 4326 - print("I can't find a CRS in the weather model file, so I will assume you are using WGS84") - t = Transformer.from_crs(4326, 4978, always_xy=True) # converts to WGS84 geocentric - - in_shape = SP.shape[:-1] - chunkSize = in_shape - CHUNKS = chunk(in_shape, in_shape) - Nchunks = len(CHUNKS) - max_len = 15000 - stepSize = 100 - - chunk_inputs = [(kk, CHUNKS[kk], wm_proj, SP, LOS, - chunkSize, stepSize, ifWet, ifHydro, max_len, wm_file) for kk in range(Nchunks)] - - if Nchunks == 1: - delays = process_chunk(*chunk_inputs[0]) - else: - with mp.Pool() as pool: - individual_results = pool.starmap(process_chunk, chunk_inputs) - try: - delays = np.concatenate(individual_results) - except ValueError: - delays = np.concatenate(individual_results, axis=-1) - - wet_delay = delays[0, ...].reshape(in_shape) - hydro_delay = delays[1, ...].reshape(in_shape) - - return wet_delay, hydro_delay - def getInterpolators(wm_file, kind='pointwise', shared=False): ''' Read 3D gridded data from a processed weather model file and wrap it with - an interpolator + the scipy RegularGridInterpolator ''' # Get the weather model data try: ds = xarray.load_dataset(wm_file) - except: + except ValueError: ds = wm_file xs_wm = np.array(ds.variables['x'][:]) @@ -105,6 +32,9 @@ def getInterpolators(wm_file, kind='pointwise', shared=False): wet = np.array(wet).transpose(1, 2, 0) hydro = np.array(hydro).transpose(1, 2, 0) + if np.any(np.isnan(wet)) or np.any(np.isnan(hydro)): + raise RuntimeError('Weather model {} contains NaNs'.format(wm_file)) + # If shared interpolators are requested # The arrays are not modified - so turning off lock for performance if shared: @@ -135,124 +65,6 @@ def make_shared_raw(inarr): return shared_arr_np -def chunk(chunkSize, in_shape): - ''' - Create a set of indices to use as chunks - ''' - startInds = makeChunkStartInds(chunkSize, in_shape) - chunkInds = makeChunksFromInds(startInds, chunkSize, in_shape) - return chunkInds - - -def makeChunkStartInds(chunkSize, in_shape): - ''' - Create a list of start indices for chunking a numpy D-dimensional array. - Inputs: - chunkSize - length-D tuple containing chunk sizes - in_shape - length-D tuple containing the shape of the array to be chunked - Outputs - chunkInds - a list of length-D tuples, where each tuple is the starting - multi-index of each chunk - Example: - makeChunkStartInds((2,2,16), (4,8,16)) - Output: [(0, 0, 0), - (0, 2, 0), - (0, 4, 0), - (0, 6, 0), - (2, 0, 0), - (2, 2, 0), - (2, 4, 0), - (2, 6, 0)] - ''' - if len(in_shape) == 1: - chunkInds = [(i,) for i in range(0, in_shape[0], chunkSize[0])] - - elif len(in_shape) == 2: - chunkInds = [(i, j) for i, j in itertools.product(range(0, in_shape[0], chunkSize[0]), - range(0, in_shape[1], chunkSize[1]))] - elif len(in_shape) == 3: - chunkInds = [(i, j, k) for i, j, k in itertools.product(range(0, in_shape[0], chunkSize[0]), - range(0, in_shape[1], chunkSize[1]), - range(0, in_shape[2], chunkSize[2]))] - else: - raise NotImplementedError('makeChunkStartInds: ndim > 3 not supported') - - return chunkInds - - -def makeChunksFromInds(startInd, chunkSize, in_shape): - ''' - From a length-N list of tuples containing starting indices, - create a list of indices into chunks of a numpy D-dimensional array. 
- Inputs: - startInd - A length-N list of D-dimensional tuples containing the - starting indices of a set of chunks - chunkSize - A D-dimensional tuple containing chunk size in each dimension - in_shape - A D-dimensional tuple containing the size of each dimension - Outputs: - chunks - A length-N list of length-D lists, where each element of the - length-D list is a numpy array of indices - Example: - makeChunksFromInds([(0, 0), (0, 2), (2, 0), (2, 2)],(4,4),(2,2)) - Output: - [[np.array([0, 0, 1, 1]), np.array([0, 1, 0, 1])], - [np.array([0, 0, 1, 1]), np.array([2, 3, 2, 3])], - [np.array([2, 2, 3, 3]), np.array([0, 1, 0, 1])], - [np.array([2, 2, 3, 3]), np.array([2, 3, 2, 3])]] - ''' - indices = [] - for ci in startInd: - index = [] - for si, k, dim in zip(ci, chunkSize, range(len(chunkSize))): - if si + k > in_shape[dim]: - dend = in_shape[dim] - else: - dend = si + k - index.append(np.array(range(si, dend))) - indices.append(index) - - # Now create the index mesh (for Ndim > 1) - chunks = [] - if len(in_shape) > 1: - for index in indices: - chunks.append([np.array(g) for g in zip(*list(itertools.product(*index)))]) - else: - chunks = indices - - return chunks - - -def process_chunk(k, chunkInds, proj_wm, SP, SLV, chunkSize, stepSize, ifWet, ifHydro, max_len, wm_file): - """ - Perform the interpolation and integration over a single chunk. - """ - # Transformer from ECEF to weather model - t = Transformer.from_proj(4978, proj_wm, always_xy=True) - - # datatype must be specific for the cython makePoints* function - _DTYPE = np.float64 - - # H5PY does not support fancy indexing with tuples, hence this if/else check - if len(chunkSize) == 1: - row = chunkInds[0] - ray = makePoints1D(max_len, SP[row, :].astype(_DTYPE), SLV[row, :].astype(_DTYPE), stepSize) - elif len(chunkSize) == 2: - row, col = chunkInds - ray = makePoints1D(max_len, SP[row, col, :].astype(_DTYPE), SLV[row, col, :].astype(_DTYPE), stepSize) - elif len(chunkSize) == 3: - row, col, zind = chunkInds - ray = makePoints1D(max_len, SP[row, col, zind, :].astype(_DTYPE), SLV[row, col, zind, :].astype(_DTYPE), stepSize) - else: - raise RuntimeError('Data in more than 4 dimensions is not supported') - - ray_x, ray_y, ray_z = t.transform(ray[..., 0, :], ray[..., 1, :], ray[..., 2, :]) - delay_wet = interpolate2(ifWet, ray_x, ray_y, ray_z) - delay_hydro = interpolate2(ifHydro, ray_x, ray_y, ray_z) - int_delays = _integrateLOS(stepSize, delay_wet, delay_hydro) - - return int_delays - - def interpolate2(fun, x, y, z): ''' helper function to make the interpolation step cleaner @@ -263,30 +75,22 @@ def interpolate2(fun, x, y, z): return outData -def _integrateLOS(stepSize, wet_pw, hydro_pw, Npts=None): - delays = [] - for d in (wet_pw, hydro_pw): - if d.ndim == 1: - delays.append(np.array([int_fcn(d, stepSize)])) - else: - delays.append(_integrate_delays(stepSize, d, Npts)) - return np.stack(delays, axis=0) - - -def _integrate_delays(stepSize, refr, Npts=None): +def get_output_spacing(cube_spacing_m, weather_model_file, wm_proj, out_crs): ''' - This function gets the actual delays by integrating the refractivity in - each node. Refractivity is given in the 'refr' variable. + Return the output spacing for a cube. 
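+
+    Example (a sketch; when cube_spacing_m is given, the weather model file is not read):
+        >>> from pyproj import CRS
+        >>> get_output_spacing(3000., 'GMAO_2020_01_30_T12_00_00.nc',
+        ...                    CRS.from_epsg(4326), CRS.from_epsg(4326))
+        0.03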
    '''
-    delays = []
-    if Npts is not None:
-        for n, ray in zip(Npts, refr):
-            delays.append(int_fcn(ray, stepSize, n))
-    else:
-        for ray in refr:
-            delays.append(int_fcn(ray, stepSize))
-    return np.array(delays)
-
+    # Calculate the appropriate cube spacing from the weather model
+    if cube_spacing_m is None:
+        with xarray.load_dataset(weather_model_file) as ds:
+            xpts = ds.x.values
+            ypts = ds.y.values
+        cube_spacing_m = np.nanmean([np.nanmean(np.diff(xpts)), np.nanmean(np.diff(ypts))])
+        if wm_proj.axis_info[0].unit_name == "degree":
+            cube_spacing_m = cube_spacing_m * 1.0e5  # convert degrees to meters (~100 km per degree)
 
-def int_fcn(y, dx, N=None):
-    return 1e-6 * dx * np.nansum(y[:N])
+    if out_crs.axis_info[0].unit_name == "degree":
+        out_spacing = cube_spacing_m / 1e5  # convert meters back to degrees
+    else:
+        out_spacing = cube_spacing_m
+
+    return out_spacing
\ No newline at end of file
diff --git a/tools/RAiDER/dem.py b/tools/RAiDER/dem.py
index 0327f4bab..5d0d33dd2 100644
--- a/tools/RAiDER/dem.py
+++ b/tools/RAiDER/dem.py
@@ -40,16 +40,12 @@ def getHeights(ll_bounds, dem_type, dem_file, lats=None, lons=None):
         hts = None
 
     elif (dem_type == 'download') or (dem_type == 'dem'):
-        if ~os.path.exists(dem_file):
-            download_dem(ll_bounds, writeDEM=True, outName=dem_file)
-
-        #TODO: interpolate heights to query lats/lons
+        zvals, metadata = download_dem(ll_bounds, writeDEM=True, outName=dem_file)
+
+        #TODO: check this
+        lons, lats = np.meshgrid(lons, lats)
 
         # Interpolate to the query points
-        hts = interpolateDEM(
-            dem_file,
-            lats,
-            lons,
-        )
+        hts = interpolateDEM(dem_file, (lats, lons), method='nearest')
 
     return hts
 
diff --git a/tools/RAiDER/etc/entrypoint.sh b/tools/RAiDER/etc/entrypoint.sh
new file mode 100755
index 000000000..eb705773e
--- /dev/null
+++ b/tools/RAiDER/etc/entrypoint.sh
@@ -0,0 +1,4 @@
+#!/bin/bash --login
+set -e
+conda activate RAiDER
+exec raider.py "$@"
diff --git a/tools/RAiDER/interpolator.py b/tools/RAiDER/interpolator.py
index e5321d9d6..61f2b46d0 100644
--- a/tools/RAiDER/interpolator.py
+++ b/tools/RAiDER/interpolator.py
@@ -107,18 +107,16 @@ def fillna3D(array, axis=-1):
     return np.moveaxis(out, -1, axis)
 
 
-def interpolateDEM(demRaster, extent, outLL, method='linear'):
-    ''' Interpolate a DEM raster to a set of lat/lon query points '''
-    minlat, maxlat, minlon, maxlon = extent
-    nPixLat = demRaster.shape[0]
-    nPixLon = demRaster.shape[1]
-    xlats = np.linspace(minlat, maxlat, nPixLat)
-    xlons = np.linspace(minlon, maxlon, nPixLon)
-    interpolator = rgi(
-        points=(xlats, xlons),
-        values=demRaster,
-        method=method,
-        bounds_error=False
-    )
-    outInterp = interpolator(outLL)
-    return outInterp
+def interpolateDEM(demFile, outLL, method='nearest'):
+    """ Interpolate a DEM raster to a set of lat/lon query points using rioxarray
+
+    outLL will be a tuple of (lats, lons); lats/lons can be 1D or 2D arrays.
+    For now only the first row/col of 2D arrays is used.
+    """
+    import rioxarray as xrr
+    da_dem = xrr.open_rasterio(demFile, band_as_variable=True)['band_1']
+    lats, lons = outLL
+    lats = lats[:, 0] if lats.ndim == 2 else lats
+    lons = lons[0, :] if lons.ndim == 2 else lons
+    z_out = da_dem.interp(y=np.sort(lats)[::-1], x=lons, method=method).data
+    return z_out
\ No newline at end of file
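For context on the rioxarray-based `interpolateDEM` above: the heavy lifting is done by `DataArray.interp`, which samples the raster at the supplied coordinates. A minimal, self-contained sketch of the same pattern (the file name `dem.tif` and the station coordinates are hypothetical, not part of this change):
```
import numpy as np
import rioxarray as xrr

# Open the DEM; band_as_variable=True exposes each raster band as a
# named variable, so 'band_1' is the elevation grid as a DataArray.
da_dem = xrr.open_rasterio('dem.tif', band_as_variable=True)['band_1']

# Hypothetical 1D station coordinates.
lats = np.array([34.1, 34.5, 34.9])
lons = np.array([-118.3, -118.0, -117.7])

# interp() samples the raster at every pairing of the supplied
# coordinates; y is sorted descending to match a north-up raster.
z_grid = da_dem.interp(y=np.sort(lats)[::-1], x=lons).data
print(z_grid.shape)  # (3, 3)
```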
diff --git a/tools/RAiDER/llreader.py b/tools/RAiDER/llreader.py
index a8d3ff01d..337ab8cbb 100644
--- a/tools/RAiDER/llreader.py
+++ b/tools/RAiDER/llreader.py
@@ -23,9 +23,15 @@
 
 class AOI(object):
     '''
-    This instantiates a generic AOI class object
+    This instantiates a generic AOI class object.
+
+    Attributes:
+        _bounding_box - S N W E bounding box
+        _proj         - pyproj-compatible CRS
+        _type         - type of AOI
     '''
     def __init__(self):
+        self._output_directory = os.getcwd()
         self._bounding_box = None
         self._proj = CRS.from_epsg(4326)
         self._geotransform = None
@@ -63,11 +69,18 @@
 
         return ll_bounds
 
+    def set_output_directory(self, output_directory):
+        self._output_directory = output_directory
+        return
+
+
 
 class StationFile(AOI):
     '''Use a .csv file containing at least Lat, Lon, and optionally Hgt_m columns'''
-    def __init__(self, station_file):
+    def __init__(self, station_file, demFile=None):
         super().__init__()
         self._filename = station_file
+        self._demfile = demFile
         self._bounding_box = bounds_from_csv(station_file)
         self._type = 'station_file'
 
@@ -78,13 +91,26 @@
 
 
     def readZ(self):
-        df = pd.read_csv(self._filename)
+        df = pd.read_csv(self._filename).drop_duplicates(subset=["Lat", "Lon"])
         if 'Hgt_m' in df.columns:
            return df['Hgt_m'].values
         else:
-            zvals, metadata = download_dem(self._bounding_box)
-            z_bounds = get_bbox(metadata)
-            z_out = interpolateDEM(zvals, z_bounds, self.readLL(), method='nearest')
+            demFile = os.path.join(self._output_directory, 'GLO30_fullres_dem.tif') \
+                            if self._demfile is None else self._demfile
+
+            zvals, metadata = download_dem(
+                self._bounding_box,
+                writeDEM=True,
+                outName=demFile,
+            )
+            # interpolate the DEM at the station locations
+            z_out0 = interpolateDEM(demFile, self.readLL())
+            if np.isnan(z_out0).all():
+                raise RuntimeError('DEM interpolation failed. Check DEM bounds and station coords.')
+
+            # interpolation covers every lat/lon pairing; the diagonal
+            # holds the values at the actual station coordinates
+            z_out = np.diag(z_out0)
             df['Hgt_m'] = z_out
             df.to_csv(self._filename, index=False)
             self.__init__(self._filename)
@@ -97,7 +123,7 @@ def __init__(self, lat_file, lon_file=None, hgt_file=None, dem_file=None, conven
         self._type = 'radar_rasters'
         self._latfile = lat_file
         self._lonfile = lon_file
-
+
         if (self._latfile is None) and (self._lonfile is None):
             raise ValueError('You need to specify a 2-band file or two single-band files')
@@ -117,10 +143,12 @@
 
     def readLL(self):
         # allow for 2-band lat/lon raster
+        lats = rio_open(self._latfile)
+
         if self._lonfile is None:
-            return rio_open(self._latfile)
+            return lats
         else:
-            return rio_open(self._latfile), rio_open(self._lonfile)
+            return lats, rio_open(self._lonfile)
 
 
     def readZ(self):
@@ -129,14 +157,16 @@
             return rio_open(self._hgtfile)
 
         else:
-            demFile = 'GLO30_fullres_dem.tif' if self._demfile is None else self._demfile
+            demFile = os.path.join(self._output_directory, 'GLO30_fullres_dem.tif') \
+                            if self._demfile is None else self._demfile
+
             zvals, metadata = download_dem(
                 self._bounding_box,
                 writeDEM=True,
-                outName=os.path.join(demFile),
+                outName=demFile,
             )
-            z_bounds = get_bbox(metadata)
-            z_out = interpolateDEM(zvals, z_bounds, self.readLL(), method='nearest')
+            z_out = interpolateDEM(demFile, self.readLL())
+
             return z_out
 
@@ -176,8 +206,9 @@ def readZ(self):
         demFile = self._filename if self._is_dem else 'GLO30_fullres_dem.tif'
         bbox = self._bounding_box
         zvals, metadata = download_dem(bbox, writeDEM=True, outName=demFile)
-        z_bounds = get_bbox(metadata)
-        z_out = interpolateDEM(zvals, z_bounds, self.readLL(), method='nearest')
+        z_out = interpolateDEM(demFile, self.readLL())
+
+
         return z_out
 
@@ -247,14 +278,4 @@
     if 'Hgt_m' in stats.columns:
         use_csv_heights = True
     snwe = [stats['Lat'].min(), stats['Lat'].max(),
             stats['Lon'].min(), stats['Lon'].max()]
-    return snwe
-
-
-def get_bbox(p):
-    lon_w = p['transform'][2]
-    lat_n = p['transform'][5]
-    pix_lon = p['transform'][0]
-    pix_lat = p['transform'][4]
-    lon_e = lon_w + p['width'] * pix_lon
-    lat_s = lat_n + p['width'] * pix_lat
-    return lat_s, lat_n, lon_w, lon_e
+    return snwe
\ No newline at end of file
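One subtlety in the `StationFile.readZ` hunk above: `interpolateDEM` evaluates the DEM at every pairing of the station lats and lons, returning an N x N grid, so the per-station heights sit on the diagonal when both arrays are given in matching station order (with latitudes already descending, as the interpolator expects). A toy illustration of that behavior, with hypothetical numbers:
```
import numpy as np
import xarray as xr

# Toy "DEM": height = 10*lat + lon on a coarse grid
# (y descending, like a north-up raster).
y = np.array([3.0, 2.0, 1.0])
x = np.array([10.0, 20.0, 30.0])
dem = xr.DataArray(10 * y[:, None] + x[None, :],
                   coords={'y': y, 'x': x}, dims=('y', 'x'))

# Two stations, latitudes already descending.
lats = np.array([2.5, 1.5])
lons = np.array([15.0, 26.0])

grid = dem.interp(y=lats, x=lons).data  # shape (2, 2): all lat/lon pairings
z_stations = np.diag(grid)              # value at (lats[i], lons[i])
print(z_stations)                       # [40. 41.] = 10*2.5+15, 10*1.5+26
```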
diff --git a/tools/RAiDER/losreader.py b/tools/RAiDER/losreader.py
index a88ba83b9..e6006729c 100644
--- a/tools/RAiDER/losreader.py
+++ b/tools/RAiDER/losreader.py
@@ -603,9 +603,9 @@ def get_radar_pos(llh, orb):
         except Exception as e:
             raise e
 
+
+    # in case of NaNs in the hgt field
     else:
-        # in case first pixel has nan
-        sat_xyz[ind, :] = np.nan
         sr[ind] = np.nan
         output[ind, ...] = np.nan
 
diff --git a/tools/RAiDER/models/plotWeather.py b/tools/RAiDER/models/plotWeather.py
index 011f62330..142b4d5cc 100755
--- a/tools/RAiDER/models/plotWeather.py
+++ b/tools/RAiDER/models/plotWeather.py
@@ -47,7 +47,7 @@ def plot_pqt(weatherObj, savefig=True, z1=500, z2=15000):
 
     # setup the plot
     f = plt.figure(figsize=(18, 14))
-    f.suptitle('{} Pressure/Humidity/Temperature at height {}m and {}m (values should drop as elevation increases)'.format(weatherObj._Name, z1, z2))
+    f.suptitle(f'{weatherObj._Name} Pressure/Humidity/Temperature at heights {z1}m and {z2}m (values should drop as elevation increases)')
 
     xind = int(np.floor(weatherObj._xs.shape[0] / 2))
     yind = int(np.floor(weatherObj._ys.shape[0] / 2))
@@ -129,12 +129,13 @@ def plot_wh(weatherObj, savefig=True, z1=500, z2=15000):
 
     # setup the plot
     f = plt.figure(figsize=(14, 10))
-    f.suptitle('{} Wet and Hydrostatic refractivity at height {}m and {}m'.format(weatherObj._Name, z1, z2))
+    f.suptitle(f'{weatherObj._Name} Wet and Hydrostatic refractivity at heights {z1}m and {z2}m')
 
     # loop over each plot
     for ind, plot, title in zip(range(len(plots)), plots, titles):
         sp = f.add_subplot(2, 2, ind + 1)
-        im = sp.imshow(np.reshape(plot, x.shape), cmap='viridis', extent=[np.nanmin(x), np.nanmax(x), np.nanmin(y), np.nanmax(y)], origin='lower')
+        im = sp.imshow(np.reshape(plot, x.shape), cmap='viridis',
+                       extent=[np.nanmin(x), np.nanmax(x), np.nanmin(y), np.nanmax(y)], origin='lower')
         divider = mal(sp)
         cax = divider.append_axes("right", size="4%", pad=0.05)
         plt.colorbar(im, cax=cax)
@@ -145,5 +146,7 @@
         sp.set_ylabel('{} m\n'.format(z2))
 
     if savefig:
-        plt.savefig('{}_refractivity_hgt{}_and_{}m.pdf'.format(weatherObj._Name, z1, z2))
+        wd = os.path.dirname(os.path.dirname(weatherObj._out_name))
+        fname = f'{weatherObj._Name}_refractivity_hgt{z1}_and_{z2}m.pdf'
+        plt.savefig(os.path.join(wd, fname))
     return f
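The `utilFcns.py` hunk below replaces the HDF5 writer with a CF-1.7 delay cube. The key convention is a placeholder scalar `crs` variable carrying projection attributes (from pyproj's `CRS.to_cf()`) that the data variables reference through `grid_mapping`. A condensed sketch of that pattern, with hypothetical coordinates and output name:
```
import numpy as np
import xarray as xr
from pyproj import CRS

# Hypothetical coordinate axes and an empty wet-delay cube.
x = np.linspace(-118.0, -117.0, 5)
y = np.linspace(35.0, 34.0, 5)
z = np.array([0.0, 100.0, 500.0])
wet = np.zeros((z.size, y.size, x.size))

ds = xr.Dataset(
    {'wet': (('z', 'y', 'x'), wet,
             {'units': 'm', 'grid_mapping': 'crs'})},
    coords={'x': x, 'y': y, 'z': z},
    attrs={'Conventions': 'CF-1.7'},
)

# CF grid-mapping variable: a placeholder scalar whose attributes,
# generated by pyproj, describe the CRS for GDAL/QGIS and friends.
ds['crs'] = 0
ds.crs.attrs.update(CRS.from_epsg(4326).to_cf())

ds.to_netcdf('delays.nc')  # requires a netCDF backend, e.g. netcdf4
```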
hydroDelay, + {"units": "m", + # 'crs': crs.to_epsg(), + "description": f"hydrostatic {out_type} delay", + "grid_mapping": "crs", + }), + ), + coords=dict( + x=(["x"], xpts), + y=(["y"], ypts), + z=(["z"], zpts), + ), + attrs=dict( + Conventions="CF-1.7", + title="RAiDER geo cube", + source=os.path.basename(weather_model_file), + history=str(datetime.utcnow()) + " RAiDER", + description=f"RAiDER geo cube - {out_type}", + reference_time=str(dt), + ), + ) + + # Write projection system mapping + ds["crs"] = int(-2147483647) # dummy placeholder + for k, v in crs.to_cf().items(): + ds.crs.attrs[k] = v + + # Write z-axis information + ds.z.attrs["axis"] = "Z" + ds.z.attrs["units"] = "m" + ds.z.attrs["description"] = "height above ellipsoid" + + # If in degrees + if crs.axis_info[0].unit_name == "degree": + ds.y.attrs["units"] = "degrees_north" + ds.y.attrs["standard_name"] = "latitude" + ds.y.attrs["long_name"] = "latitude" + + ds.x.attrs["units"] = "degrees_east" + ds.x.attrs["standard_name"] = "longitude" + ds.x.attrs["long_name"] = "longitude" + + else: + ds.y.attrs["axis"] = "Y" + ds.y.attrs["standard_name"] = "projection_y_coordinate" + ds.y.attrs["long_name"] = "y-coordinate in projected coordinate system" + ds.y.attrs["units"] = "m" - with h5py.File(filename, 'w') as f: - f['lat'] = lats - f['lon'] = lons - f['hgts'] = hgts - f['wetDelay'] = wet - f['hydroDelay'] = hydro - f['wetDelayUnit'] = "m" - f['hydroDelayUnit'] = "m" - f['hgtsUnit'] = "m" - f.attrs['DelayType'] = delayType + ds.x.attrs["axis"] = "X" + ds.x.attrs["standard_name"] = "projection_x_coordinate" + ds.x.attrs["long_name"] = "x-coordinate in projected coordinate system" + ds.x.attrs["units"] = "m" + + return ds def writeArrayToRaster(array, filename, noDataValue=0., fmt='ENVI', proj=None, gt=None): @@ -449,10 +503,11 @@ def writeDelays(aoi, wetDelay, hydroDelay, # Do different things, depending on the type of input if aoi.type() == 'station_file': + #TODO: why is this a try/except? try: - df = pd.read_csv(aoi._filename) + df = pd.read_csv(aoi._filename).drop_duplicates(subset=["Lat", "Lon"]) except ValueError: - df = pd.read_csv(aoi._filename) + df = pd.read_csv(aoi._filename).drop_duplicates(subset=["Lat", "Lon"]) df['wetDelay'] = wetDelay df['hydroDelay'] = hydroDelay @@ -676,7 +731,7 @@ def UTM_to_WGS84(z, l, x, y): return np.reshape(lon, shp), np.reshape(lat, shp) -def transform_bbox(wesn, dest_crs=4326, src_crs=4326, margin=100.): +def transform_bbox(snwe_in, dest_crs=4326, src_crs=4326, margin=100.): """ Transform bbox to lat/lon or another CRS for use with rest of workflow Returns: SNWE @@ -698,12 +753,11 @@ def transform_bbox(wesn, dest_crs=4326, src_crs=4326, margin=100.): # If dest_crs is same as src_crs if dest_crs == src_crs: - snwe = [wesn[2], wesn[3], wesn[0], wesn[1]] - return snwe + return snwe_in T = Transformer.from_crs(src_crs, dest_crs, always_xy=True) - xs = np.linspace(wesn[0]-margin, wesn[1]+margin, num=11) - ys = np.linspace(wesn[2]-margin, wesn[3]+margin, num=11) + xs = np.linspace(snwe_in[2]-margin, snwe_in[3]+margin, num=11) + ys = np.linspace(snwe_in[0]-margin, snwe_in[1]+margin, num=11) X, Y = np.meshgrid(xs, ys) # Transform to lat/lon