diff --git a/act/discovery/cropscape.py b/act/discovery/cropscape.py
index 07308929f9..e50b4ff52b 100644
--- a/act/discovery/cropscape.py
+++ b/act/discovery/cropscape.py
@@ -89,10 +89,9 @@ def get_crop_type(lat=None, lon=None, year=None):
 
     # Add year, lat, and lon as parameters
     params = {'year': str(year), 'x': str(x), 'y': str(y)}
 
-    # Perform the request. Note, verify set to False until
-    # server SSL errors can be worked out
+    # Perform the request.
     try:
-        req = requests.get(url, params=params, verify=False, timeout=1)
+        req = requests.get(url, params=params, timeout=1)
     except Exception:
         return
diff --git a/act/io/neon.py b/act/io/neon.py
index 284bae0353..fdd808bfeb 100644
--- a/act/io/neon.py
+++ b/act/io/neon.py
@@ -3,6 +3,7 @@
 """
 
+import numpy as np
 import pandas as pd
 import xarray as xr
 
 
@@ -82,9 +83,9 @@ def read_neon_csv(files, variable_files=None, position_files=None):
             dloc = loc_df.loc[loc_df['HOR.VER'] == hor_loc + '.' + ver_loc]
             idx = dloc.index.values
             if len(idx) > 0:
-                ds['lat'] = xr.DataArray(data=float(loc_df['referenceLatitude'].values[idx]))
-                ds['lon'] = xr.DataArray(data=float(loc_df['referenceLongitude'].values[idx]))
-                ds['alt'] = xr.DataArray(data=float(loc_df['referenceElevation'].values[idx]))
+                ds['lat'] = xr.DataArray(data=np.float64(loc_df['referenceLatitude'].values[idx][0]))
+                ds['lon'] = xr.DataArray(data=np.float64(loc_df['referenceLongitude'].values[idx][0]))
+                ds['alt'] = xr.DataArray(data=np.float64(loc_df['referenceElevation'].values[idx][0]))
                 variables = [
                     'xOffset',
                     'yOffset',
@@ -98,7 +99,7 @@ def read_neon_csv(files, variable_files=None, position_files=None):
                     'yAzimuth',
                 ]
                 for v in variables:
-                    ds[v] = xr.DataArray(data=float(loc_df[v].values[idx]))
+                    ds[v] = xr.DataArray(data=np.float64(loc_df[v].values[idx]))
         multi_ds.append(ds)
 
     ds = xr.merge(multi_ds)
diff --git a/act/io/noaapsl.py b/act/io/noaapsl.py
index a5e2da5e46..c5b3500b08 100644
--- a/act/io/noaapsl.py
+++ b/act/io/noaapsl.py
@@ -188,7 +188,7 @@ def _parse_psl_wind_lines(filepath, lines, line_offset=0):
     beam_elevation = np.array([beam_elevation1, beam_elevation2, beam_elevation3], dtype='float32')
 
     # Read in the data table section using pandas
-    df = pd.read_csv(filepath, skiprows=line_offset + 10, delim_whitespace=True)
+    df = pd.read_csv(filepath, skiprows=line_offset + 10, sep='\s+')
 
     # Only read in the number of rows for a given set of gates
     df = df.iloc[: int(number_of_range_gates)]
@@ -326,7 +326,7 @@ def _parse_psl_temperature_lines(filepath, lines, line_offset=0):
     beam_azimuth, beam_elevation = filter_list(lines[8].split(' ')).astype(float)
 
     # Read in the data table section using pandas
-    df = pd.read_csv(filepath, skiprows=line_offset + 10, delim_whitespace=True)
+    df = pd.read_csv(filepath, skiprows=line_offset + 10, sep='\s+')
 
     # Only read in the number of rows for a given set of gates
     df = df.iloc[: int(number_of_gates)]
@@ -1115,7 +1115,7 @@ def _parse_psl_radar_moments(files):
                 f,
                 skiprows=[0, 1, 2],
                 nrows=int(data['n_gates']['data'][-1]) - 1,
-                delim_whitespace=True,
+                sep='\s+',
                 names=list(names.keys()),
             )
             index2 = 0
@@ -1143,7 +1143,7 @@ def _parse_psl_radar_moments(files):
                 f,
                 skiprows=list(range(index2 + 1)),
                 nrows=int(data['n_gates']['data'][-1]) - 1,
-                delim_whitespace=True,
+                sep='\s+',
                 names=list(names.keys()),
             )
 
diff --git a/act/utils/data_utils.py b/act/utils/data_utils.py
index abb3cff0a9..17682c6187 100644
--- a/act/utils/data_utils.py
+++ b/act/utils/data_utils.py
@@ -383,7 +383,7 @@ def assign_coordinates(ds, coord_list):
         if coord not in ds.variables.keys():
             raise KeyError(coord + ' is not a variable in the Dataset.')
 
-        if ds.dims[coord_list[coord]] != len(ds.variables[coord]):
+        if ds.sizes[coord_list[coord]] != len(ds.variables[coord]):
             raise IndexError(
                 coord + ' must have the same ' + 'value as length of ' + coord_list[coord]
             )
diff --git a/tests/io/test_hysplit.py b/tests/io/test_hysplit.py
index 162fe49a1c..c5df67d5aa 100644
--- a/tests/io/test_hysplit.py
+++ b/tests/io/test_hysplit.py
@@ -10,7 +10,7 @@ def test_read_hysplit():
     assert 'lon' in ds.variables.keys()
     assert 'alt' in ds.variables.keys()
     assert 'PRESSURE' in ds.variables.keys()
-    assert ds.dims["num_grids"] == 8
-    assert ds.dims["num_trajectories"] == 1
-    assert ds.dims['time'] == 121
+    assert ds.sizes["num_grids"] == 8
+    assert ds.sizes["num_trajectories"] == 1
+    assert ds.sizes['time'] == 121
     assert ds['age'].min() == -120
diff --git a/tests/io/test_noaapsl.py b/tests/io/test_noaapsl.py
index 35326ea592..2964b5c879 100644
--- a/tests/io/test_noaapsl.py
+++ b/tests/io/test_noaapsl.py
@@ -11,12 +11,12 @@ def test_read_psl_wind_profiler():
         act.tests.EXAMPLE_NOAA_PSL, transpose=False
     )
     # test dimensions
-    assert 'time' and 'HT' in test_ds_low.dims.keys()
-    assert 'time' and 'HT' in test_ds_hi.dims.keys()
-    assert test_ds_low.dims['time'] == 4
-    assert test_ds_hi.dims['time'] == 4
-    assert test_ds_low.dims['HT'] == 49
-    assert test_ds_hi.dims['HT'] == 50
+    assert 'time' and 'HT' in test_ds_low.sizes.keys()
+    assert 'time' and 'HT' in test_ds_hi.sizes.keys()
+    assert test_ds_low.sizes['time'] == 4
+    assert test_ds_hi.sizes['time'] == 4
+    assert test_ds_low.sizes['HT'] == 49
+    assert test_ds_hi.sizes['HT'] == 50
 
     # test coordinates
     assert (test_ds_low.coords['HT'][0:5] == np.array([0.151, 0.254, 0.356, 0.458, 0.561])).all()
diff --git a/tests/plotting/baseline/test_plot_barbs_from_u_v3.png b/tests/plotting/baseline/test_plot_barbs_from_u_v3.png
new file mode 100644
index 0000000000..7504c32894
Binary files /dev/null and b/tests/plotting/baseline/test_plot_barbs_from_u_v3.png differ
diff --git a/tests/plotting/baseline/test_plot_barbs_from_u_v4.png b/tests/plotting/baseline/test_plot_barbs_from_u_v4.png
new file mode 100644
index 0000000000..c5f07621bb
Binary files /dev/null and b/tests/plotting/baseline/test_plot_barbs_from_u_v4.png differ
diff --git a/tests/plotting/baseline/test_plot_barbs_from_u_v5.png b/tests/plotting/baseline/test_plot_barbs_from_u_v5.png
new file mode 100644
index 0000000000..55fa440b50
Binary files /dev/null and b/tests/plotting/baseline/test_plot_barbs_from_u_v5.png differ
diff --git a/tests/plotting/test_timeseriesdisplay.py b/tests/plotting/test_timeseriesdisplay.py
index 2b43398bf1..9c237b7001 100644
--- a/tests/plotting/test_timeseriesdisplay.py
+++ b/tests/plotting/test_timeseriesdisplay.py
@@ -423,6 +423,7 @@ def test_plot_barbs_from_u_v2():
     matplotlib.pyplot.close(BarbDisplay.fig)
 
 
+@pytest.mark.mpl_image_compare(tolerance=10)
 def test_plot_barbs_from_u_v3():
     bins = list(np.linspace(0, 1, 10))
     xbins = list(pd.date_range(pd.to_datetime('2020-01-01'), pd.to_datetime('2020-01-02'), 12))
@@ -446,6 +447,7 @@ def test_plot_barbs_from_u_v3():
     matplotlib.pyplot.close(BarbDisplay.fig)
 
 
+@pytest.mark.mpl_image_compare(tolerance=10)
 def test_plot_barbs_from_u_v4():
     bins = list(np.linspace(0, 1, 10))
     xbins = [pd.to_datetime('2020-01-01')]
@@ -471,6 +473,7 @@ def test_plot_barbs_from_u_v4():
     matplotlib.pyplot.close(BarbDisplay.fig)
 
 
+@pytest.mark.mpl_image_compare(tolerance=10)
 def test_plot_barbs_from_u_v5():
     bins = list(np.linspace(0, 1, 10))
     xbins = [pd.to_datetime('2020-01-01')]
diff --git a/tests/qc/test_qcfilter.py b/tests/qc/test_qcfilter.py
index 99311ec589..1cb5b3cf29 100644
--- a/tests/qc/test_qcfilter.py
+++ b/tests/qc/test_qcfilter.py
@@ -1,5 +1,5 @@
 import copy
-from datetime import datetime
+import datetime
 
 import dask.array as da
 import numpy as np
@@ -390,12 +390,12 @@ def test_qc_speed():
         coords={'time': time},
     )
 
-    start = datetime.utcnow()
+    start = datetime.datetime.now(datetime.UTC)
 
     for name, var in noisy_data_mapping.items():
         failed_qc = var > 0.75  # Consider data above 0.75 as bad. Negligible time here.
         ds.qcfilter.add_test(name, index=failed_qc, test_meaning='Value above threshold')
 
-    time_diff = datetime.utcnow() - start
+    time_diff = datetime.datetime.now(datetime.UTC) - start
 
     assert time_diff.seconds <= 4
diff --git a/tests/utils/test_datetime_utils.py b/tests/utils/test_datetime_utils.py
index ce03f21a83..d6ab22d642 100644
--- a/tests/utils/test_datetime_utils.py
+++ b/tests/utils/test_datetime_utils.py
@@ -15,7 +15,7 @@ def test_dates_between():
     answer = np.arange(start_string, end_string, dtype='datetime64[D]')
     answer = np.append(answer, answer[-1] + 1)
     answer = answer.astype('datetime64[s]').astype(int)
-    answer = [datetime.utcfromtimestamp(ii) for ii in answer]
+    answer = [datetime.fromtimestamp(ii, datetime.UTC) for ii in answer]
 
     assert date_list == answer