Resolved the segmentation fault issue when loading a checkpoint with HDF5 data recording enabled. #1837

Open
wants to merge 2 commits into master
36 changes: 24 additions & 12 deletions include/trick/DRHDF5.hh
@@ -37,16 +37,6 @@ PROGRAMMERS:

namespace Trick {

#ifdef HDF5
#ifndef TRICK_ICG
struct HDF5_INFO {
hid_t dataset;
Trick::DataRecordBuffer * drb ;
};
#endif
#endif


/**
The DRHDF5 recording format is an industry conforming HDF5 formatted file. Files written in this format are named
log_<group_name>.h5. The contents of this file type are readable by the Trick Data Products packages from
@@ -56,6 +46,9 @@ namespace Trick {
@verbatim
GROUP "/" {
GROUP "header" {
DATASET "byte_order" {
"little_endian"
}
DATASET "file_names" {
"param_1_file_name", "param_2_file_name", etc...
}
@@ -133,10 +126,29 @@ GROUP "/" {
protected:

#ifdef HDF5
std::vector<HDF5_INFO *> parameters; // trick_io(**)

/**
The HDF5 file handle.
*/
hid_t file; // trick_io(**)
/**
Root group and header group in the HDF5 file.
*/
hid_t root_group, header_group; // trick_io(**)

/**
Array of parameter names used in the HDF5 packet table.
Each entry is a copy of the corresponding reference name.
Keeping copies ensures that when a dataset is closed, the
reference name in rec_buffer remains valid and is not
double-deleted when variables are removed from rec_buffer.
*/
char** param_names; // trick_io(**)

/**
The dataset ids for each parameter.
*/
hid_t* param_dataset_ids; // trick_io(**)
#endif

} ;
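
For reference, the header-group layout documented above can be spot-checked against a produced log file. The snippet below is an illustrative sketch rather than part of this change set; it assumes the third-party h5py package is installed and that a completed run has written log_DR_typesHDF5.h5 into its RUN directory.

# Illustrative sketch: inspect a Trick HDF5 log file with h5py (an assumed,
# optional dependency). The path below is hypothetical.
import h5py

with h5py.File("RUN_test/log_DR_typesHDF5.h5", "r") as f:
    header = f["header"]
    print(header["byte_order"][...])   # e.g. "little_endian"
    print(header["file_names"][...])   # one entry per recorded parameter
    print(list(f.keys()))              # "header" plus the recorded data
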
43 changes: 43 additions & 0 deletions test/SIM_test_dr/Modified_data/dr_bitfHDF5.dr
@@ -0,0 +1,43 @@
global DR_GROUP_ID
global drg
try:
    if DR_GROUP_ID >= 0:
        DR_GROUP_ID += 1
except NameError:
    DR_GROUP_ID = 0
    drg = []

drg.append(trick.DRHDF5("DR_bitfieldsHDF5"))
drg[DR_GROUP_ID].set_freq(trick.DR_Always)
drg[DR_GROUP_ID].set_cycle(0.1)
drg[DR_GROUP_ID].set_single_prec_only(False)
drg[DR_GROUP_ID].add_variable("drx.drt.charB.var1")
drg[DR_GROUP_ID].add_variable("drx.drt.charB.var2")
drg[DR_GROUP_ID].add_variable("drx.drt.charB.var3")
drg[DR_GROUP_ID].add_variable("drx.drt.charB.var4")
drg[DR_GROUP_ID].add_variable("drx.drt.intB.var1")
drg[DR_GROUP_ID].add_variable("drx.drt.intB.var2")
drg[DR_GROUP_ID].add_variable("drx.drt.intB.var3")
drg[DR_GROUP_ID].add_variable("drx.drt.intB.var4")
drg[DR_GROUP_ID].add_variable("drx.drt.shortB.var1")
drg[DR_GROUP_ID].add_variable("drx.drt.shortB.var2")
drg[DR_GROUP_ID].add_variable("drx.drt.shortB.var3")
drg[DR_GROUP_ID].add_variable("drx.drt.shortB.var4")
drg[DR_GROUP_ID].add_variable("drx.drt.ucharB.var1")
drg[DR_GROUP_ID].add_variable("drx.drt.ucharB.var2")
drg[DR_GROUP_ID].add_variable("drx.drt.ucharB.var3")
drg[DR_GROUP_ID].add_variable("drx.drt.ucharB.var4")
drg[DR_GROUP_ID].add_variable("drx.drt.uintB.var1")
drg[DR_GROUP_ID].add_variable("drx.drt.uintB.var2")
drg[DR_GROUP_ID].add_variable("drx.drt.uintB.var3")
drg[DR_GROUP_ID].add_variable("drx.drt.uintB.var4")
drg[DR_GROUP_ID].add_variable("drx.drt.ushortB.var1")
drg[DR_GROUP_ID].add_variable("drx.drt.ushortB.var2")
drg[DR_GROUP_ID].add_variable("drx.drt.ushortB.var3")
drg[DR_GROUP_ID].add_variable("drx.drt.ushortB.var4")
drg[DR_GROUP_ID].add_variable("drx.drt.mixB.var1")
drg[DR_GROUP_ID].add_variable("drx.drt.mixB.var2")
drg[DR_GROUP_ID].add_variable("drx.drt.mixB.var3")
drg[DR_GROUP_ID].add_variable("drx.drt.mixB.var4")
trick.add_data_record_group(drg[DR_GROUP_ID], trick.DR_Buffer)
drg[DR_GROUP_ID].enable()
38 changes: 38 additions & 0 deletions test/SIM_test_dr/Modified_data/dr_typesHDF5.dr
@@ -0,0 +1,38 @@
global DR_GROUP_ID
global drg
try:
    if DR_GROUP_ID >= 0:
        DR_GROUP_ID += 1
except NameError:
    DR_GROUP_ID = 0
    drg = []

drg.append(trick.DRHDF5("DR_typesHDF5"))
drg[DR_GROUP_ID].set_freq(trick.DR_Always)
drg[DR_GROUP_ID].set_cycle(0.1)
drg[DR_GROUP_ID].set_single_prec_only(False)
drg[DR_GROUP_ID].add_variable("drx.drt.a")
drg[DR_GROUP_ID].add_variable("drx.drt.b")
drg[DR_GROUP_ID].add_variable("drx.drt.c")
drg[DR_GROUP_ID].add_variable("drx.drt.d")
drg[DR_GROUP_ID].add_variable("drx.drt.e")
drg[DR_GROUP_ID].add_variable("drx.drt.f")
drg[DR_GROUP_ID].add_variable("drx.drt.g")
drg[DR_GROUP_ID].add_variable("drx.drt.h")
drg[DR_GROUP_ID].add_variable("drx.drt.i")
drg[DR_GROUP_ID].add_variable("drx.drt.j")
drg[DR_GROUP_ID].add_variable("drx.drt.k")
drg[DR_GROUP_ID].add_variable("drx.drt.l")
drg[DR_GROUP_ID].add_variable("drx.drt.m")
drg[DR_GROUP_ID].add_variable("drx.drt.n")
drg[DR_GROUP_ID].add_variable("drx.drt.o")
drg[DR_GROUP_ID].add_variable("drx.drt.p")
drg[DR_GROUP_ID].add_variable("drx.drt.q[0]")
drg[DR_GROUP_ID].add_variable("drx.drt.q[1]")
drg[DR_GROUP_ID].add_variable("drx.drt.q[2]")
drg[DR_GROUP_ID].add_variable("drx.drt.q[3]")
drg[DR_GROUP_ID].add_variable("drx.drt.q[4]")
drg[DR_GROUP_ID].add_variable("drx.drt.r[0][0]")

trick.add_data_record_group(drg[DR_GROUP_ID], trick.DR_Buffer)
drg[DR_GROUP_ID].enable()
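
Both new .dr scripts mirror the existing ASCII and BINARY recording scripts and are driven by unit_test.py below. As a hypothetical standalone usage, assuming the sim was built with HDF5 support so trick.DRHDF5 exists, a run input file could load one of them directly:

# Hypothetical RUN input snippet; assumes HDF5 support is compiled in.
exec(open("Modified_data/dr_typesHDF5.dr").read())
trick.stop(1.0)
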
Binary file not shown.
Binary file not shown.
30 changes: 24 additions & 6 deletions test/SIM_test_dr/RUN_test/unit_test.py
@@ -5,6 +5,10 @@

trick_utest.unit_tests.set_test_name( "DRTest" )

has_dhf5 = False
if hasattr(trick, 'DRHDF5'):
    has_dhf5 = True

######################################################################################################################

test_suite = "drg api"
@@ -16,10 +20,18 @@
TRICK_EXPECT_EQ( num_drgs , 0 , test_suite , "0 drgs before any created" )

# The first item of each pair is the .dr file name and the second item of each pair is the drg name
dr_file_name_drg_name_tuple = (('Modified_data/dr_typesASCII.dr', 'DR_typesASCII'),
                               ('Modified_data/dr_typesBINARY.dr', 'DR_typesBINARY'),
                               ('Modified_data/dr_bitfASCII.dr', 'DR_bitfieldsASCII'),
                               ('Modified_data/dr_bitfBINARY.dr', 'DR_bitfieldsBINARY'))
if has_dhf5:
    dr_file_name_drg_name_tuple = (('Modified_data/dr_typesASCII.dr', 'DR_typesASCII'),
                                   ('Modified_data/dr_typesBINARY.dr', 'DR_typesBINARY'),
                                   ('Modified_data/dr_typesHDF5.dr', 'DR_typesHDF5'),
                                   ('Modified_data/dr_bitfASCII.dr', 'DR_bitfieldsASCII'),
                                   ('Modified_data/dr_bitfBINARY.dr', 'DR_bitfieldsBINARY'),
                                   ('Modified_data/dr_bitfHDF5.dr', 'DR_bitfieldsHDF5'))
else:
    dr_file_name_drg_name_tuple = (('Modified_data/dr_typesASCII.dr', 'DR_typesASCII'),
                                   ('Modified_data/dr_typesBINARY.dr', 'DR_typesBINARY'),
                                   ('Modified_data/dr_bitfASCII.dr', 'DR_bitfieldsASCII'),
                                   ('Modified_data/dr_bitfBINARY.dr', 'DR_bitfieldsBINARY'))

num_files = len(dr_file_name_drg_name_tuple)
for i in range(num_files):
@@ -29,7 +41,10 @@
num_drgs = trick.get_num_data_record_groups()

# Check the result of trick.get_num_data_record_groups()
TRICK_EXPECT_EQ( num_drgs , 4 , test_suite , "num of dr groups = 4" )
if has_dhf5:
    TRICK_EXPECT_EQ( num_drgs , 6 , test_suite , "num of dr groups = 6" )
else:
    TRICK_EXPECT_EQ( num_drgs , 4 , test_suite , "num of dr groups = 4" )

# Test trick.get_data_record_group(<drg_name>) for getting the drg pointer by its name
# Check the name of the obtained drg instead of the drg pointer
@@ -49,7 +64,10 @@
is_null = False
if trick.get_data_record_group_by_idx(num_drgs+1) is None :
    is_null = True
TRICK_EXPECT_TRUE( is_null, test_suite , "null drg by drg id 5" )
if has_dhf5:
    TRICK_EXPECT_TRUE( is_null, test_suite , "null drg by drg id 7" )
else:
    TRICK_EXPECT_TRUE( is_null, test_suite , "null drg by drg id 5" )

is_null = False
if trick.get_data_record_group_by_idx(-1) is None :
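
To tie the test back to the title of this pull request, the sketch below outlines the scenario that previously segfaulted: reloading a checkpoint while an HDF5 recording group is enabled. It is illustrative only; the trick.load_checkpoint() call and the chkpnt_1.000000 file name are assumptions about a typical Trick setup, not part of this change set.

# Hypothetical input file sketch of the reported failure scenario; assumes
# trick.load_checkpoint() is available and that RUN_test/chkpnt_1.000000 was
# written by an earlier run of the same sim.
exec(open("Modified_data/dr_typesHDF5.dr").read())
trick.load_checkpoint("RUN_test/chkpnt_1.000000")
trick.stop(2.0)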