Skip to content

Commit

Permalink
make off tests pass on windows, more cleanup of temp file apis
Browse files Browse the repository at this point in the history
  • Loading branch information
ggggggggg committed May 22, 2024
1 parent d747e30 commit f06335a
Show file tree
Hide file tree
Showing 2 changed files with 72 additions and 73 deletions.
93 changes: 46 additions & 47 deletions tests/off/test_channels.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
import pylab as plt
import lmfit
import h5py
import resource
# import resource
import tempfile

# Remove a warning message
Expand Down Expand Up @@ -452,7 +452,7 @@ def test_duplicate_cuts():
data.cutAdd("deliberateduplicate", lambda energy: energy < 750, overwrite=True)


def test_recipes():
def test_recipes(tmp_path):
rb = util.RecipeBook(baseIngredients=["x", "y", "z"], propertyClass=None,
coefs_dtype=None)

Expand All @@ -478,13 +478,13 @@ def funb(a, z):
assert rb._craftWithFunction(lambda a, b, c: a + b + c, args) == 18
assert rb.craft(lambda a, b, c: a + b + c, args) == 18

with tempfile.NamedTemporaryFile(suffix=".rbpkl") as pklfile:
rb.to_file(pklfile.name, overwrite=True)
rb2 = util.RecipeBook.from_file(pklfile.name)
save_path = tmp_path / "recipe_book.pkl"
rb.to_file(save_path)
rb2 = util.RecipeBook.from_file(save_path)

assert rb2.craft("a", args) == 3
assert rb2.craft("b", args) == 6
assert rb2.craft("c", args) == 9
assert rb2.craft("a", args) == 3
assert rb2.craft("b", args) == 6
assert rb2.craft("c", args) == 9


def test_linefit_has_tail_and_has_linear_background():
Expand Down Expand Up @@ -538,46 +538,45 @@ def test_iterstates():
ds.plotHist(np.arange(100, 2500, 50), 'energy', states="BC", coAddStates=False)


def test_save_load_recipe_book():
def test_save_load_recipe_book(tmp_path):
rb = ds.recipes
with tempfile.NamedTemporaryFile(suffix=".rbpkl") as rbfile:
save_path = rbfile.name
rb.to_file(save_path, overwrite=True)
rb2 = util.RecipeBook.from_file(save_path)
assert rb.craftedIngredients.keys() == rb2.craftedIngredients.keys()
args = {"pretriggerMean": 1, "filtValue": 2}
print(rb.craftedIngredients["energy"])
assert rb.craft("energy", args) == rb2.craft("energy", args)


def test_open_many_OFF_files():
"""Open more OFF ChannelGroup objects than the system allows. Test that close method closes them."""

# LOWER the system's limit on number of open files, to make the test smaller
soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
request_maxfiles = min(60, soft_limit)
resource.setrlimit(resource.RLIMIT_NOFILE, (request_maxfiles, hard_limit))
try:
maxfiles, _ = resource.getrlimit(resource.RLIMIT_NOFILE)
NFilesToOpen = maxfiles // 2 + 10

filename = os.path.join(d, "data_for_test", "20181205_BCDEFGHI/20181205_BCDEFGHI_chan1.off")
filelist = getOffFileListFromOneFile(filename, maxChans=2)
for _ in range(NFilesToOpen):
_ = ChannelGroup(filelist, verbose=True, channelClass=Channel,
excludeStates=["START", "END"])

# Now open one ChannelGroup with too many files. If the resources aren't freed, we can
# only open it once, not twice.
NFilePairsToOpen = (maxfiles - 12) // 6
filelist *= NFilePairsToOpen
for _ in range(3):
_ = ChannelGroup(filelist, verbose=True, channelClass=Channel,
excludeStates=["START", "END"])

# Use the try...finally to undo our reduction in the limit on number of open files.
finally:
resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit))
save_path = tmp_path / "recipe_book.pkl"
rb.to_file(save_path)
rb2 = util.RecipeBook.from_file(save_path)
assert rb.craftedIngredients.keys() == rb2.craftedIngredients.keys()
args = {"pretriggerMean": 1, "filtValue": 2}
print(rb.craftedIngredients["energy"])
assert rb.craft("energy", args) == rb2.craft("energy", args)


# def test_open_many_OFF_files():
# """Open more OFF ChannelGroup objects than the system allows. Test that close method closes them."""

# # LOWER the system's limit on number of open files, to make the test smaller
# soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
# request_maxfiles = min(60, soft_limit)
# resource.setrlimit(resource.RLIMIT_NOFILE, (request_maxfiles, hard_limit))
# try:
# maxfiles, _ = resource.getrlimit(resource.RLIMIT_NOFILE)
# NFilesToOpen = maxfiles // 2 + 10

# filename = os.path.join(d, "data_for_test", "20181205_BCDEFGHI/20181205_BCDEFGHI_chan1.off")
# filelist = getOffFileListFromOneFile(filename, maxChans=2)
# for _ in range(NFilesToOpen):
# _ = ChannelGroup(filelist, verbose=True, channelClass=Channel,
# excludeStates=["START", "END"])

# # Now open one ChannelGroup with too many files. If the resources aren't freed, we can
# # only open it once, not twice.
# NFilePairsToOpen = (maxfiles - 12) // 6
# filelist *= NFilePairsToOpen
# for _ in range(3):
# _ = ChannelGroup(filelist, verbose=True, channelClass=Channel,
# excludeStates=["START", "END"])

# # Use the try...finally to undo our reduction in the limit on number of open files.
# finally:
# resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit))


def test_listmode_to_hdf5():
Expand Down
52 changes: 26 additions & 26 deletions tests/off/test_off.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import os
import mass.off
from mass.off import OffFile
import resource
# import resource

d = os.path.dirname(os.path.realpath(__file__))

Expand Down Expand Up @@ -30,28 +30,28 @@ def test_open_file_with_base64_projectors_and_basis():
assert OffFile(filename) is not None


def test_mmap_many_files():
"""Open more OFF file objects than the system allows. Test that close method closes them."""
files = [] # hold on to the OffFile objects so the garbage collector doesn't close them.

# LOWER the system's limit on number of open files, to make the test smaller
soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
request_maxfiles = min(30, soft_limit)
resource.setrlimit(resource.RLIMIT_NOFILE, (request_maxfiles, hard_limit))
try:
maxfiles, _ = resource.getrlimit(resource.RLIMIT_NOFILE)
NFilesToOpen = maxfiles // 3 + 10

filename = os.path.join(d, "data_for_test/off_with_binary_projectors_and_basis.off")
for _ in range(NFilesToOpen):
f = OffFile(filename)
assert f.nRecords > 0
files.append(f)
f.close()

# Use the try...finally to ensure that the gc can close files at the end of this test,
# preventing a cascade of meaningless test failures if this one fails.
# Also undo our reduction in the limit on number of open files.
finally:
del files
resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit))
# def test_mmap_many_files():
# """Open more OFF file objects than the system allows. Test that close method closes them."""
# files = [] # hold on to the OffFile objects so the garbage collector doesn't close them.

# # LOWER the system's limit on number of open files, to make the test smaller
# soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
# request_maxfiles = min(30, soft_limit)
# resource.setrlimit(resource.RLIMIT_NOFILE, (request_maxfiles, hard_limit))
# try:
# maxfiles, _ = resource.getrlimit(resource.RLIMIT_NOFILE)
# NFilesToOpen = maxfiles // 3 + 10

# filename = os.path.join(d, "data_for_test/off_with_binary_projectors_and_basis.off")
# for _ in range(NFilesToOpen):
# f = OffFile(filename)
# assert f.nRecords > 0
# files.append(f)
# f.close()

# # Use the try...finally to ensure that the gc can close files at the end of this test,
# # preventing a cascade of meaningless test failures if this one fails.
# # Also undo our reduction in the limit on number of open files.
# finally:
# del files
# resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit))

0 comments on commit f06335a

Please sign in to comment.