feat: Improve HDF5 error handling in pywr-core.

Implement `#[from]` for the HDF5 error variant. This requires removing
the `PartialEq` and `Eq` derives from `PywrError`, and adding a new
error variant for HDF5 unicode conversion. This allows neater handling
of HDF5 errors in pywr-core.
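
For illustration, a minimal sketch of the pattern this commit adopts (the enum and function below are simplified stand-ins, not the real pywr-core definitions): with thiserror, a `#[from]` field generates an `impl From<hdf5_metno::Error>` for the error enum, so fallible HDF5 calls can be propagated with `?` instead of explicit `map_err` calls.

    use thiserror::Error;

    // No `PartialEq`/`Eq` derives: the wrapped `hdf5_metno::Error` cannot
    // satisfy them, which is why the commit removes them from `PywrError`.
    #[derive(Error, Debug)]
    enum DemoError {
        #[error("hdf5 error: {0}")]
        HDF5Error(#[from] hdf5_metno::Error),
    }

    // `?` converts `hdf5_metno::Error` into `DemoError::HDF5Error` automatically.
    fn create_file(path: &str) -> Result<hdf5_metno::File, DemoError> {
        Ok(hdf5_metno::File::create(path)?)
    }
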
jetuk committed Jan 20, 2025
1 parent 72f43ed commit a951e20
Showing 3 changed files with 36 additions and 65 deletions.
6 changes: 4 additions & 2 deletions pywr-core/src/lib.rs
@@ -40,7 +40,7 @@ pub mod timestep;
 pub mod utils;
 pub mod virtual_storage;
 
-#[derive(Error, Debug, PartialEq, Eq)]
+#[derive(Error, Debug)]
 pub enum PywrError {
     #[error("invalid node connect")]
     InvalidNodeConnection,
@@ -157,7 +157,9 @@ pub enum PywrError {
     #[error("recorder does not supported aggregation")]
     RecorderDoesNotSupportAggregation,
     #[error("hdf5 error: {0}")]
-    HDF5Error(String),
+    HDF5Error(#[from] hdf5_metno::Error),
+    #[error("could not create unicode variable name from: {0}")]
+    HDF5VarLenUnicode(String),
     #[error("csv error: {0}")]
     CSVError(String),
     #[error("not implemented by recorder")]
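
One side effect worth noting: thiserror also treats a `#[from]` field as the error's `source`, so the underlying HDF5 error stays reachable through the standard error chain. A sketch, assuming `pywr_core::PywrError` as the crate path (the `report` helper is hypothetical, not part of this commit):

    use std::error::Error as _; // brings `source()` into scope

    fn report(err: &pywr_core::PywrError) {
        eprintln!("error: {err}");
        // Walk the cause chain that `#[from]` establishes.
        let mut source = err.source();
        while let Some(cause) = source {
            eprintln!("caused by: {cause}");
            source = cause.source();
        }
    }
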
29 changes: 12 additions & 17 deletions pywr-core/src/network.rs
@@ -1714,38 +1714,33 @@ mod tests {
 
         network.add_input_node("my-node", None).unwrap();
         // Second add with the same name
-        assert_eq!(
+        assert!(matches!(
             network.add_input_node("my-node", None),
-            Err(PywrError::NodeNameAlreadyExists("my-node".to_string()))
-        );
+            Err(PywrError::NodeNameAlreadyExists(n)) if n == "my-node"));
 
         network.add_input_node("my-node", Some("a")).unwrap();
         // Second add with the same name
-        assert_eq!(
+        assert!(matches!(
             network.add_input_node("my-node", Some("a")),
-            Err(PywrError::NodeNameAlreadyExists("my-node".to_string()))
-        );
+            Err(PywrError::NodeNameAlreadyExists(n)) if n == "my-node"));
 
-        assert_eq!(
+        assert!(matches!(
             network.add_link_node("my-node", None),
-            Err(PywrError::NodeNameAlreadyExists("my-node".to_string()))
-        );
+            Err(PywrError::NodeNameAlreadyExists(n)) if n == "my-node"));
 
-        assert_eq!(
+        assert!(matches!(
             network.add_output_node("my-node", None),
-            Err(PywrError::NodeNameAlreadyExists("my-node".to_string()))
-        );
+            Err(PywrError::NodeNameAlreadyExists(n)) if n == "my-node"));
 
-        assert_eq!(
+        assert!(matches!(
             network.add_storage_node(
                 "my-node",
                 None,
                 StorageInitialVolume::Absolute(10.0),
                 None,
                 Some(10.0.into())
             ),
-            Err(PywrError::NodeNameAlreadyExists("my-node".to_string()))
-        );
+            Err(PywrError::NodeNameAlreadyExists(n)) if n == "my-node"));
     }
 
     #[test]
@@ -1762,10 +1757,10 @@ mod tests {
         node.set_max_flow_constraint(Some(parameter.into())).unwrap();
 
         // Try to assign a constraint not defined for particular node type
-        assert_eq!(
+        assert!(matches!(
             node.set_max_volume_constraint(Some(10.0.into())),
             Err(PywrError::StorageConstraintsUndefined)
-        );
+        ));
     }
 
     #[test]
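
Why these tests change: once `PywrError` stops deriving `PartialEq`, `assert_eq!` on `Result<_, PywrError>` values no longer compiles, so the assertions switch to `assert!(matches!(...))`, which pattern-matches the variant and uses a guard to check the payload. A self-contained sketch with a hypothetical error type:

    #[derive(Debug)] // deliberately no `PartialEq`, mirroring the new `PywrError`
    enum DemoError {
        NodeNameAlreadyExists(String),
    }

    fn add_node(name: &str) -> Result<(), DemoError> {
        Err(DemoError::NodeNameAlreadyExists(name.to_string()))
    }

    fn main() {
        // The guard `if n == "my-node"` still asserts on the error's payload.
        assert!(matches!(
            add_node("my-node"),
            Err(DemoError::NodeNameAlreadyExists(n)) if n == "my-node"
        ));
    }
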
66 changes: 20 additions & 46 deletions pywr-core/src/recorders/hdf.rs
@@ -72,19 +72,14 @@ impl Recorder for HDF5Recorder {
         &self.meta
     }
     fn setup(&self, domain: &ModelDomain, network: &Network) -> Result<Option<Box<(dyn Any)>>, PywrError> {
-        let file = match hdf5_metno::File::create(&self.filename) {
-            Ok(f) => f,
-            Err(e) => return Err(PywrError::HDF5Error(e.to_string())),
-        };
+        let file = hdf5_metno::File::create(&self.filename)?;
 
         write_pywr_metadata(&file)?;
         write_scenarios_metadata(&file, domain.scenarios())?;
 
         // Create the time table
         let dates: Array1<_> = domain.time().timesteps().iter().map(DateTime::from_timestamp).collect();
-        if let Err(e) = file.deref().new_dataset_builder().with_data(&dates).create("time") {
-            return Err(PywrError::HDF5Error(e.to_string()));
-        }
+        file.deref().new_dataset_builder().with_data(&dates).create("time")?;
 
         let shape = (domain.time().len(), domain.scenarios().len());
 
@@ -130,9 +125,7 @@ impl Recorder for HDF5Recorder {
                 .map(|(_, s)| metric.get_value(model, s))
                 .collect::<Result<Vec<_>, _>>()?;
 
-            if let Err(e) = dataset.write_slice(&values, s![timestep.index, ..]) {
-                return Err(PywrError::HDF5Error(e.to_string()));
-            }
+            dataset.write_slice(&values, s![timestep.index, ..])?;
         }
 
         Ok(())
@@ -149,7 +142,7 @@ impl Recorder for HDF5Recorder {
         match internal_state.take() {
             Some(internal) => {
                 if let Ok(internal) = internal.downcast::<Internal>() {
-                    internal.file.close().map_err(|e| PywrError::HDF5Error(e.to_string()))
+                    Ok(internal.file.close()?)
                 } else {
                     panic!("Internal state did not downcast to the correct type! :(");
                 }
@@ -160,11 +153,7 @@
 }
 
 fn require_dataset<S: Into<Extents>>(parent: &Group, shape: S, name: &str) -> Result<hdf5_metno::Dataset, PywrError> {
-    parent
-        .new_dataset::<f64>()
-        .shape(shape)
-        .create(name)
-        .map_err(|e| PywrError::HDF5Error(e.to_string()))
+    Ok(parent.new_dataset::<f64>().shape(shape).create(name)?)
 }
 
 /// Create a node dataset in /parent/name/sub_name/attribute
@@ -177,28 +166,22 @@ fn require_metric_dataset<S: Into<Extents>>(
     let ds = require_dataset(&grp, shape, metric.attribute())?;
 
     // Write the type and subtype as attributes
-    let ty =
-        hdf5_metno::types::VarLenUnicode::from_str(metric.ty()).map_err(|e| PywrError::HDF5Error(e.to_string()))?;
+    let ty = hdf5_metno::types::VarLenUnicode::from_str(metric.ty())
+        .map_err(|e| PywrError::HDF5VarLenUnicode(e.to_string()))?;
     let attr = ds
         .new_attr::<hdf5_metno::types::VarLenUnicode>()
         .shape(())
-        .create("pywr-type")
-        .map_err(|e| PywrError::HDF5Error(e.to_string()))?;
-    attr.as_writer()
-        .write_scalar(&ty)
-        .map_err(|e| PywrError::HDF5Error(e.to_string()))?;
+        .create("pywr-type")?;
+    attr.as_writer().write_scalar(&ty)?;
 
     if let Some(sub_type) = metric.sub_type() {
-        let sub_type =
-            hdf5_metno::types::VarLenUnicode::from_str(sub_type).map_err(|e| PywrError::HDF5Error(e.to_string()))?;
+        let sub_type = hdf5_metno::types::VarLenUnicode::from_str(sub_type)
+            .map_err(|e| PywrError::HDF5VarLenUnicode(e.to_string()))?;
         let attr = ds
             .new_attr::<hdf5_metno::types::VarLenUnicode>()
            .shape(())
-            .create("pywr-subtype")
-            .map_err(|e| PywrError::HDF5Error(e.to_string()))?;
-        attr.as_writer()
-            .write_scalar(&sub_type)
-            .map_err(|e| PywrError::HDF5Error(e.to_string()))?;
+            .create("pywr-subtype")?;
+        attr.as_writer().write_scalar(&sub_type)?;
     }
     Ok(ds)
 }
@@ -208,9 +191,7 @@ fn require_group(parent: &Group, name: &str) -> Result<Group, PywrError> {
         Ok(g) => Ok(g),
         Err(_) => {
             // Group could not be retrieved already, try to create it instead
-            parent
-                .create_group(name)
-                .map_err(|e| PywrError::HDF5Error(e.to_string()))
+            Ok(parent.create_group(name)?)
         }
     }
 }
@@ -220,16 +201,13 @@ fn write_pywr_metadata(file: &hdf5_metno::File) -> Result<(), PywrError> {
 
     const VERSION: &str = env!("CARGO_PKG_VERSION");
     let version =
-        hdf5_metno::types::VarLenUnicode::from_str(VERSION).map_err(|e| PywrError::HDF5Error(e.to_string()))?;
+        hdf5_metno::types::VarLenUnicode::from_str(VERSION).map_err(|e| PywrError::HDF5VarLenUnicode(e.to_string()))?;
 
     let attr = root
         .new_attr::<hdf5_metno::types::VarLenUnicode>()
         .shape(())
-        .create("pywr-version")
-        .map_err(|e| PywrError::HDF5Error(e.to_string()))?;
-    attr.as_writer()
-        .write_scalar(&version)
-        .map_err(|e| PywrError::HDF5Error(e.to_string()))?;
+        .create("pywr-version")?;
+    attr.as_writer().write_scalar(&version)?;
 
     Ok(())
 }
@@ -261,15 +239,13 @@ fn write_scenarios_metadata(file: &hdf5_metno::File, domain: &ScenarioDomain) ->
         .iter()
         .map(|s| {
             let name = hdf5_metno::types::VarLenUnicode::from_str(s.name())
-                .map_err(|e| PywrError::HDF5Error(e.to_string()))?;
+                .map_err(|e| PywrError::HDF5VarLenUnicode(e.to_string()))?;
 
             Ok(ScenarioGroupEntry { name, size: s.size() })
         })
         .collect::<Result<_, PywrError>>()?;
 
-    if let Err(e) = grp.new_dataset_builder().with_data(&scenario_groups).create("groups") {
-        return Err(PywrError::HDF5Error(e.to_string()));
-    }
+    grp.new_dataset_builder().with_data(&scenario_groups).create("groups")?;
 
     let scenarios: Array1<H5ScenarioIndex> = domain
         .indices()
@@ -284,9 +260,7 @@
         })
         .collect::<Result<_, PywrError>>()?;
 
-    if let Err(e) = grp.new_dataset_builder().with_data(&scenarios).create("indices") {
-        return Err(PywrError::HDF5Error(e.to_string()));
-    }
+    grp.new_dataset_builder().with_data(&scenarios).create("indices")?;
 
     Ok(())
 }
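
The one conversion that cannot use `?` directly is `VarLenUnicode::from_str`, whose failure is a string-conversion error rather than an `hdf5_metno::Error`; that is what the new `HDF5VarLenUnicode` variant handles. A sketch of the mapping, assuming `pywr_core::PywrError` is in scope (the helper function is illustrative):

    use std::str::FromStr;

    // The string-conversion failure is not an `hdf5_metno::Error`, so it
    // cannot ride the `#[from]` impl and is mapped to the new variant by hand.
    fn to_varlen_unicode(s: &str) -> Result<hdf5_metno::types::VarLenUnicode, pywr_core::PywrError> {
        hdf5_metno::types::VarLenUnicode::from_str(s)
            .map_err(|e| pywr_core::PywrError::HDF5VarLenUnicode(e.to_string()))
    }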
