Test cleanup and fixes #928

Open: wants to merge 6 commits into master
3 changes: 0 additions & 3 deletions spectral_cube/dask_spectral_cube.py
@@ -636,7 +636,6 @@ def median(self, axis=None, **kwargs):
        if axis is None:
            # da.nanmedian raises NotImplementedError since it is not possible
            # to do efficiently, so we use Numpy instead.
-            self._warn_slow('median')
            return np.nanmedian(self._compute(data), **kwargs)
        else:
            return self._compute(da.nanmedian(self._get_filled_data(fill=np.nan), axis=axis, **kwargs))
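For context on the comment above: dask's `nanmedian` cannot reduce over the whole array at once, which is why the `axis=None` branch falls back to an eager NumPy computation. A minimal sketch of the distinction, using a toy array rather than the spectral-cube code:

```python
import dask.array as da
import numpy as np

data = da.random.random((2, 3, 4), chunks=(1, 2, 2))

try:
    # Reducing over every axis at once is not implemented in dask
    da.nanmedian(data, axis=None)
except NotImplementedError:
    # Fall back to NumPy on the materialized array, as the branch above does
    full_median = np.nanmedian(data.compute())

# Reducing along a single axis stays lazy and chunk-friendly
per_plane = da.nanmedian(data, axis=0).compute()
```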
@@ -660,7 +659,6 @@ def percentile(self, q, axis=None, **kwargs):
        if axis is None:
            # There is no way to compute the percentile of the whole array in
            # chunks.
-            self._warn_slow('percentile')
            return np.nanpercentile(data, q, **kwargs)
        else:
            # Rechunk so that there is only one chunk along the desired axis
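The "rechunk so that there is only one chunk along the desired axis" approach mentioned above is a common dask pattern: collapse the reduced axis into a single chunk, then apply the NumPy reduction block-wise. A rough sketch of that pattern, not the library's exact implementation, with a hypothetical helper name:

```python
import dask.array as da
import numpy as np

def chunked_nanpercentile(arr, q, axis):
    # One chunk along the reduced axis, so each block sees that axis in full
    arr = arr.rechunk({axis: -1})
    # Apply the NumPy reduction per block; the reduced axis disappears
    return da.map_blocks(np.nanpercentile, arr, q, axis=axis,
                         drop_axis=axis, dtype=arr.dtype)

data = da.random.random((4, 5, 6), chunks=(2, 5, 3))
result = chunked_nanpercentile(data, 50, axis=1).compute()
```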
@@ -694,7 +692,6 @@ def mad_std(self, axis=None, ignore_nan=True, **kwargs):
        if axis is None:
            # In this case we have to load the full data - even dask's
            # nanmedian doesn't work efficiently over the whole array.
-            self._warn_slow('mad_std')
            return stats.mad_std(data, ignore_nan=ignore_nan, **kwargs)
        else:
            # Rechunk so that there is only one chunk along the desired axis
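As background for the `mad_std` branch: `astropy.stats.mad_std` is the median absolute deviation rescaled to estimate a Gaussian standard deviation, which is why the `axis=None` case needs a median over the full array. A small self-contained check, with made-up example values:

```python
import numpy as np
from astropy.stats import mad_std

x = np.array([1.0, 2.0, 3.0, 4.0, 100.0])   # one strong outlier

# sigma_MAD = MAD / Phi^-1(3/4) ~= 1.4826 * median(|x - median(x)|)
mad = np.median(np.abs(x - np.median(x)))
assert np.isclose(mad_std(x), 1.482602218505602 * mad)
```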
85 changes: 43 additions & 42 deletions spectral_cube/tests/data/header_jybeam.hdr
@@ -1,50 +1,51 @@
-SIMPLE = T / Written by IDL: Fri Feb 20 13:46:36 2009
-BITPIX = -32 /
-NAXIS = 4 /
-NAXIS1 = 1884 /
-NAXIS2 = 2606 /
-NAXIS3 = 200 //
-NAXIS4 = 1 /
-EXTEND = T /
-BSCALE = 1.00000000000E+00 /
-BZERO = 0.00000000000E+00 /
-TELESCOP= 'VLA ' /
-CDELT1 = -5.55555561268E-04 /
-CRPIX1 = 1.37300000000E+03 /
-CRVAL1 = 2.31837500515E+01 /
+SIMPLE = T / Written by IDL: Fri Feb 20 13:46:36 2009
+BITPIX = -32 /
+NAXIS = 4 /
+NAXIS1 = 1884 /
+NAXIS2 = 2606 /
+NAXIS3 = 200 //
+NAXIS4 = 1 /
+EXTEND = T /
+BSCALE = 1.00000000000E+00 /
+BZERO = 0.00000000000E+00 /
+TELESCOP= 'VLA ' /
+CDELT1 = -5.55555561268E-04 /
+CRPIX1 = 1.37300000000E+03 /
+CRVAL1 = 2.31837500515E+01 /
CUNIT1 = 'deg'
-CTYPE1 = 'RA---SIN' /
-CDELT2 = 5.55555561268E-04 /
-CRPIX2 = 1.15200000000E+03 /
-CRVAL2 = 3.05765277962E+01 /
+CTYPE1 = 'RA---SIN' /
+CDELT2 = 5.55555561268E-04 /
+CRPIX2 = 1.15200000000E+03 /
+CRVAL2 = 3.05765277962E+01 /
CUNIT2 = 'deg'
-CTYPE2 = 'DEC--SIN' /
-CDELT3 = 1.28821496879E+00 /
-CRPIX3 = 1.00000000000E+00 /
-CRVAL3 = -3.21214698632E+02 /
-CTYPE3 = 'VOPT' /
+CTYPE2 = 'DEC--SIN' /
+CDELT3 = 1.28821496879E+00 /
+CRPIX3 = 1.00000000000E+00 /
+CRVAL3 = -3.21214698632E+02 /
+CTYPE3 = 'VOPT' /
CUNIT3 = 'km/s'
-CDELT4 = 1.00000000000E+00 /
-CRPIX4 = 1.00000000000E+00 /
-CRVAL4 = 1.00000000000E+00 /
-CTYPE4 = 'STOKES ' /
+CDELT4 = 1.00000000000E+00 /
+CRPIX4 = 1.00000000000E+00 /
+CRVAL4 = 1.00000000000E+00 /
+CTYPE4 = 'STOKES ' /
CUNIT4 = ''
SPECSYS = 'BARYCENT'
-DATE-OBS= '1998-06-18T16:30:25.4' /
-RESTFREQ= 1.42040571841E+09 /
-CELLSCAL= 'CONSTANT' /
-BUNIT = 'JY/BEAM ' /
-EPOCH = 2.00000000000E+03 /
-OBJECT = 'M33 ' /
-OBSERVER= 'AT206 ' /
-VOBS = -2.57256763070E+01 /
-LTYPE = 'channel ' /
-LSTART = 2.15000000000E+02 /
-LWIDTH = 1.00000000000E+00 /
-LSTEP = 1.00000000000E+00 /
-BTYPE = 'intensity' /
-DATAMIN = -6.57081836835E-03 /
-DATAMAX = 1.52362231165E-02 /
+DATE-OBS= '1998-06-18T16:30:25.4' /
+MJD-OBS = 50982.687794 /
+RESTFREQ= 1.42040571841E+09 /
+CELLSCAL= 'CONSTANT' /
+BUNIT = 'JY/BEAM ' /
+EPOCH = 2.00000000000E+03 /
+OBJECT = 'M33 ' /
+OBSERVER= 'AT206 ' /
+VOBS = -2.57256763070E+01 /
+LTYPE = 'channel ' /
+LSTART = 2.15000000000E+02 /
+LWIDTH = 1.00000000000E+00 /
+LSTEP = 1.00000000000E+00 /
+BTYPE = 'intensity' /
+DATAMIN = -6.57081836835E-03 /
+DATAMAX = 1.52362231165E-02 /
BMAJ = 0.0002777777777777778
BMIN = 0.0002777777777777778
BPA = 0.0
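For orientation, the BMAJ/BMIN values at the end of this test header are in degrees (1/3600 deg, i.e. a 1 arcsec round beam). A quick way to confirm what beam the header describes, assuming the radio_beam package that spectral-cube depends on; this snippet is illustrative and not part of the PR:

```python
from astropy.io import fits
from astropy import units as u
from radio_beam import Beam

header = fits.Header.fromtextfile('spectral_cube/tests/data/header_jybeam.hdr')
beam = Beam.from_fits_header(header)
print(beam.major.to(u.arcsec))   # ~1 arcsec, from BMAJ = 0.0002777... deg
```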
25 changes: 23 additions & 2 deletions spectral_cube/tests/test_dask.py
@@ -106,13 +106,34 @@ def test_statistics(data_adv):

def test_statistics_withnans(data_adv):
    cube = DaskSpectralCube.read(data_adv).rechunk(chunks=(1, 2, 3))

    # shape is 2, 3, 4
-    cube._data[:,:,:2] = np.nan
+    cube._data[:,:,:1] = np.nan
+    # ensure some chunks are all nan
+    cube.rechunk((1,2,2))
    stats = cube.statistics()

    for key in ('min', 'max', 'sum'):
-        assert stats[key] == getattr(cube, key)()
+        np.testing.assert_allclose(stats[key],
+                                   getattr(cube, key)(),
+                                   rtol=1e-10)
+
+
+def test_statistics_allnans(data_adv):
+    cube = DaskSpectralCube.read(data_adv).rechunk(chunks=(1, 2, 3))
+
+    # shape is 2, 3, 4
+    cube._data[:,:,:2] = np.nan
+    # ensure some chunks are all nan
+    cube.rechunk((1,2,2))
+    stats = cube.statistics()
+
+    for key in ('min', 'max', 'mean', 'sigma', 'rms'):
+        assert np.isnan(stats[key])
+
+    # Sum of NaNs is 0
+    assert stats['sum'] == 0 * cube.unit
+    assert stats['sumsq'] == (0 * cube.unit)**2


@pytest.mark.skipif(not CASA_INSTALLED, reason='Requires CASA to be installed')
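The assertions in the new `test_statistics_allnans` mirror NumPy's conventions for all-NaN data: nan-ignoring extrema and means have nothing left to reduce and return NaN, while `nansum` treats NaN as zero. A quick illustration on a plain array, independent of the spectral-cube fixtures:

```python
import numpy as np

allnan = np.full((2, 3), np.nan)

print(np.nansum(allnan))    # 0.0 -- NaNs are treated as zero
print(np.nanmin(allnan))    # nan, with an all-NaN RuntimeWarning
print(np.nanmean(allnan))   # nan, with an empty-slice RuntimeWarning
```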
8 changes: 4 additions & 4 deletions spectral_cube/tests/test_spectral_cube.py
@@ -113,6 +113,9 @@ def test_arithmetic_warning(data_vda_jybeam_lower, recwarn, use_dask):
    with pytest.warns(UserWarning, match='requires loading the entire'):
        cube + 5*cube.unit

+    with pytest.warns(UserWarning, match='requires loading the entire'):
+        cube + cube
+

def test_huge_disallowed(data_vda_jybeam_lower, use_dask):

@@ -133,10 +136,7 @@ def test_huge_disallowed(data_vda_jybeam_lower, use_dask):
    with pytest.raises(ValueError, match='entire cube into memory'):
        cube + 5*cube.unit

-    if use_dask:
-        with pytest.raises(ValueError, match='entire cube into memory'):
-            cube.mad_std()
-    else:
+    if not use_dask:
        with pytest.raises(ValueError, match='entire cube into memory'):
            cube.max(how='cube')

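For readers following `test_huge_disallowed`: the guard being exercised is the user-facing `allow_huge_operations` switch on the cube. When the ValueError fires, the expected workflow looks roughly like the sketch below; the file name and the cube being large enough to trigger the guard are hypothetical:

```python
from spectral_cube import SpectralCube

cube = SpectralCube.read('big_cube.fits')   # hypothetical large file

try:
    cube + 5 * cube.unit                    # whole-cube arithmetic
except ValueError:
    # The cube was flagged as huge; opt in explicitly and retry
    cube.allow_huge_operations = True
    result = cube + 5 * cube.unit
```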
9 changes: 7 additions & 2 deletions spectral_cube/utils.py
@@ -38,7 +38,12 @@ def wrapper(self, *args, **kwargs):
        accepts_how_keyword = 'how' in argspec.args or argspec.varkw == 'how'

        warn_how = accepts_how_keyword and ((kwargs.get('how') == 'cube') or 'how' not in kwargs)

+        # This restores showing the "loading the entire cube into memory" warning for
+        # _apply_everywhere and _cube_on_cube_operation
+        if function.__name__ in ['_apply_everywhere', '_cube_on_cube_operation']:
> Review comment from the PR author: @keflavich -- can you check if this is consistent with the changes you made here: cb8dfbe

+            warn_how = True

        if self._is_huge and not self.allow_huge_operations:
            warn_message = ("This function ({0}) requires loading the entire "
                            "cube into memory, and the cube is large ({1} "
@@ -50,7 +55,7 @@ def wrapper(self, *args, **kwargs):
                warn_message += ("Alternatively, you may want to consider using an "
                                 "approach that does not load the whole cube into "
                                 "memory by specifying how='slice' or how='ray'. ")

            warn_message += ("See {bigdataurl} for details.".format(bigdataurl=bigdataurl))

            raise ValueError(warn_message)
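A side note on the introspection this wrapper relies on: `inspect.getfullargspec` exposes both the named arguments and the name of a `**`-parameter, which is what the `accepts_how_keyword` line above checks. The toy functions below are purely illustrative:

```python
import inspect

def reduce_with_how(self, axis=None, how='auto'):
    ...

def reduce_with_kwargs(self, axis=None, **kwargs):
    ...

spec = inspect.getfullargspec(reduce_with_how)
print('how' in spec.args)    # True: 'how' is an explicit keyword argument

spec = inspect.getfullargspec(reduce_with_kwargs)
print(spec.varkw)            # 'kwargs': the name of the **-parameter (None if absent)
```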