Skip to content

Commit

Permalink
FIX: Many warning fixes for ACT. (#822)
Browse files Browse the repository at this point in the history
* FIX: Many warning fixes for ACT.
These fixes include datetime UTC changes, missing images, xarray ``dims``-to-``sizes`` changes, and more.

* FIX: Forgot to remove 0 index.

* STY: PEP8 fixes.

* FIX: Use timezone import.

* FIX: Need to check the length of coord arrays when casting to float.

* STY: PEP8 fixes.
  • Loading branch information
zssherman authored Apr 15, 2024
1 parent 3046a76 commit 60b86f4
Show file tree
Hide file tree
Showing 12 changed files with 40 additions and 26 deletions.
5 changes: 2 additions & 3 deletions act/discovery/cropscape.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,10 +89,9 @@ def get_crop_type(lat=None, lon=None, year=None):
# Add year, lat, and lon as parameters
params = {'year': str(year), 'x': str(x), 'y': str(y)}

# Perform the request. Note, verify set to False until
# server SSL errors can be worked out
# Perform the request.
try:
req = requests.get(url, params=params, verify=False, timeout=1)
req = requests.get(url, params=params, timeout=1)
except Exception:
return

Expand Down
20 changes: 16 additions & 4 deletions act/io/neon.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,9 +82,18 @@ def read_neon_csv(files, variable_files=None, position_files=None):
dloc = loc_df.loc[loc_df['HOR.VER'] == hor_loc + '.' + ver_loc]
idx = dloc.index.values
if len(idx) > 0:
ds['lat'] = xr.DataArray(data=float(loc_df['referenceLatitude'].values[idx]))
ds['lon'] = xr.DataArray(data=float(loc_df['referenceLongitude'].values[idx]))
ds['alt'] = xr.DataArray(data=float(loc_df['referenceElevation'].values[idx]))
if len(loc_df['referenceLatitude'].values) > 1:
ds['lat'] = xr.DataArray(data=float(loc_df['referenceLatitude'].values[idx][0]))
ds['lon'] = xr.DataArray(
data=float(loc_df['referenceLongitude'].values[idx][0])
)
ds['alt'] = xr.DataArray(
data=float(loc_df['referenceElevation'].values[idx][0])
)
else:
ds['lat'] = xr.DataArray(data=float(loc_df['referenceLatitude'].values[idx]))
ds['lon'] = xr.DataArray(data=float(loc_df['referenceLongitude'].values[idx]))
ds['alt'] = xr.DataArray(data=float(loc_df['referenceElevation'].values[idx]))
variables = [
'xOffset',
'yOffset',
Expand All @@ -98,7 +107,10 @@ def read_neon_csv(files, variable_files=None, position_files=None):
'yAzimuth',
]
for v in variables:
ds[v] = xr.DataArray(data=float(loc_df[v].values[idx]))
if len(loc_df[v].values) > 1:
ds[v] = xr.DataArray(data=float(loc_df[v].values[idx][0]))
else:
ds[v] = xr.DataArray(data=float(loc_df[v].values[idx]))
multi_ds.append(ds)

ds = xr.merge(multi_ds)
Expand Down
8 changes: 4 additions & 4 deletions act/io/noaapsl.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,7 @@ def _parse_psl_wind_lines(filepath, lines, line_offset=0):
beam_elevation = np.array([beam_elevation1, beam_elevation2, beam_elevation3], dtype='float32')

# Read in the data table section using pandas
df = pd.read_csv(filepath, skiprows=line_offset + 10, delim_whitespace=True)
df = pd.read_csv(filepath, skiprows=line_offset + 10, sep=r'\s+')

# Only read in the number of rows for a given set of gates
df = df.iloc[: int(number_of_range_gates)]
Expand Down Expand Up @@ -326,7 +326,7 @@ def _parse_psl_temperature_lines(filepath, lines, line_offset=0):
beam_azimuth, beam_elevation = filter_list(lines[8].split(' ')).astype(float)

# Read in the data table section using pandas
df = pd.read_csv(filepath, skiprows=line_offset + 10, delim_whitespace=True)
df = pd.read_csv(filepath, skiprows=line_offset + 10, sep=r'\s+')

# Only read in the number of rows for a given set of gates
df = df.iloc[: int(number_of_gates)]
Expand Down Expand Up @@ -1115,7 +1115,7 @@ def _parse_psl_radar_moments(files):
f,
skiprows=[0, 1, 2],
nrows=int(data['n_gates']['data'][-1]) - 1,
delim_whitespace=True,
sep=r'\s+',
names=list(names.keys()),
)
index2 = 0
Expand Down Expand Up @@ -1143,7 +1143,7 @@ def _parse_psl_radar_moments(files):
f,
skiprows=list(range(index2 + 1)),
nrows=int(data['n_gates']['data'][-1]) - 1,
delim_whitespace=True,
sep=r'\s+',
names=list(names.keys()),
)

Expand Down
2 changes: 1 addition & 1 deletion act/utils/data_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -383,7 +383,7 @@ def assign_coordinates(ds, coord_list):
if coord not in ds.variables.keys():
raise KeyError(coord + ' is not a variable in the Dataset.')

if ds.dims[coord_list[coord]] != len(ds.variables[coord]):
if ds.sizes[coord_list[coord]] != len(ds.variables[coord]):
raise IndexError(
coord + ' must have the same ' + 'value as length of ' + coord_list[coord]
)
Expand Down
6 changes: 3 additions & 3 deletions tests/io/test_hysplit.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ def test_read_hysplit():
assert 'lon' in ds.variables.keys()
assert 'alt' in ds.variables.keys()
assert 'PRESSURE' in ds.variables.keys()
assert ds.dims["num_grids"] == 8
assert ds.dims["num_trajectories"] == 1
assert ds.dims['time'] == 121
assert ds.sizes["num_grids"] == 8
assert ds.sizes["num_trajectories"] == 1
assert ds.sizes['time'] == 121
assert ds['age'].min() == -120
12 changes: 6 additions & 6 deletions tests/io/test_noaapsl.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,12 @@ def test_read_psl_wind_profiler():
act.tests.EXAMPLE_NOAA_PSL, transpose=False
)
# test dimensions
assert 'time' and 'HT' in test_ds_low.dims.keys()
assert 'time' and 'HT' in test_ds_hi.dims.keys()
assert test_ds_low.dims['time'] == 4
assert test_ds_hi.dims['time'] == 4
assert test_ds_low.dims['HT'] == 49
assert test_ds_hi.dims['HT'] == 50
assert 'time' and 'HT' in test_ds_low.sizes.keys()
assert 'time' and 'HT' in test_ds_hi.sizes.keys()
assert test_ds_low.sizes['time'] == 4
assert test_ds_hi.sizes['time'] == 4
assert test_ds_low.sizes['HT'] == 49
assert test_ds_hi.sizes['HT'] == 50

# test coordinates
assert (test_ds_low.coords['HT'][0:5] == np.array([0.151, 0.254, 0.356, 0.458, 0.561])).all()
Expand Down
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
3 changes: 3 additions & 0 deletions tests/plotting/test_timeseriesdisplay.py
Original file line number Diff line number Diff line change
Expand Up @@ -423,6 +423,7 @@ def test_plot_barbs_from_u_v2():
matplotlib.pyplot.close(BarbDisplay.fig)


@pytest.mark.mpl_image_compare(tolerance=10)
def test_plot_barbs_from_u_v3():
bins = list(np.linspace(0, 1, 10))
xbins = list(pd.date_range(pd.to_datetime('2020-01-01'), pd.to_datetime('2020-01-02'), 12))
Expand All @@ -446,6 +447,7 @@ def test_plot_barbs_from_u_v3():
matplotlib.pyplot.close(BarbDisplay.fig)


@pytest.mark.mpl_image_compare(tolerance=10)
def test_plot_barbs_from_u_v4():
bins = list(np.linspace(0, 1, 10))
xbins = [pd.to_datetime('2020-01-01')]
Expand All @@ -471,6 +473,7 @@ def test_plot_barbs_from_u_v4():
matplotlib.pyplot.close(BarbDisplay.fig)


@pytest.mark.mpl_image_compare(tolerance=10)
def test_plot_barbs_from_u_v5():
bins = list(np.linspace(0, 1, 10))
xbins = [pd.to_datetime('2020-01-01')]
Expand Down
6 changes: 3 additions & 3 deletions tests/qc/test_qcfilter.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import copy
from datetime import datetime
from datetime import datetime, timezone

import dask.array as da
import numpy as np
Expand Down Expand Up @@ -390,12 +390,12 @@ def test_qc_speed():
coords={'time': time},
)

start = datetime.utcnow()
start = datetime.now(timezone.utc)
for name, var in noisy_data_mapping.items():
failed_qc = var > 0.75 # Consider data above 0.75 as bad. Negligible time here.
ds.qcfilter.add_test(name, index=failed_qc, test_meaning='Value above threshold')

time_diff = datetime.utcnow() - start
time_diff = datetime.now(timezone.utc) - start
assert time_diff.seconds <= 4


Expand Down
4 changes: 2 additions & 2 deletions tests/utils/test_datetime_utils.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from datetime import datetime
from datetime import datetime, timezone

import numpy as np
import pandas as pd
Expand All @@ -15,7 +15,7 @@ def test_dates_between():
answer = np.arange(start_string, end_string, dtype='datetime64[D]')
answer = np.append(answer, answer[-1] + 1)
answer = answer.astype('datetime64[s]').astype(int)
answer = [datetime.utcfromtimestamp(ii) for ii in answer]
answer = [datetime.fromtimestamp(ii, tz=timezone.utc).replace(tzinfo=None) for ii in answer]

assert date_list == answer

Expand Down

0 comments on commit 60b86f4

Please sign in to comment.