diff --git a/pywr-core/src/lib.rs b/pywr-core/src/lib.rs
index e209fe0..5d3b5e6 100644
--- a/pywr-core/src/lib.rs
+++ b/pywr-core/src/lib.rs
@@ -40,7 +40,7 @@ pub mod timestep;
 pub mod utils;
 pub mod virtual_storage;
 
-#[derive(Error, Debug, PartialEq, Eq)]
+#[derive(Error, Debug)]
 pub enum PywrError {
     #[error("invalid node connect")]
     InvalidNodeConnection,
@@ -157,7 +157,9 @@ pub enum PywrError {
     #[error("recorder does not supported aggregation")]
     RecorderDoesNotSupportAggregation,
     #[error("hdf5 error: {0}")]
-    HDF5Error(String),
+    HDF5Error(#[from] hdf5_metno::Error),
+    #[error("could not create unicode variable name from: {0}")]
+    HDF5VarLenUnicode(String),
     #[error("csv error: {0}")]
     CSVError(String),
     #[error("not implemented by recorder")]
diff --git a/pywr-core/src/network.rs b/pywr-core/src/network.rs
index 3f9ed99..a0f43bc 100644
--- a/pywr-core/src/network.rs
+++ b/pywr-core/src/network.rs
@@ -1714,29 +1714,25 @@ mod tests {
         network.add_input_node("my-node", None).unwrap();
 
         // Second add with the same name
-        assert_eq!(
+        assert!(matches!(
             network.add_input_node("my-node", None),
-            Err(PywrError::NodeNameAlreadyExists("my-node".to_string()))
-        );
+            Err(PywrError::NodeNameAlreadyExists(n)) if n == "my-node"));
         network.add_input_node("my-node", Some("a")).unwrap();
 
         // Second add with the same name
-        assert_eq!(
+        assert!(matches!(
             network.add_input_node("my-node", Some("a")),
-            Err(PywrError::NodeNameAlreadyExists("my-node".to_string()))
-        );
+            Err(PywrError::NodeNameAlreadyExists(n)) if n == "my-node"));
 
-        assert_eq!(
+        assert!(matches!(
             network.add_link_node("my-node", None),
-            Err(PywrError::NodeNameAlreadyExists("my-node".to_string()))
-        );
+            Err(PywrError::NodeNameAlreadyExists(n)) if n == "my-node"));
 
-        assert_eq!(
+        assert!(matches!(
             network.add_output_node("my-node", None),
-            Err(PywrError::NodeNameAlreadyExists("my-node".to_string()))
-        );
+            Err(PywrError::NodeNameAlreadyExists(n)) if n == "my-node"));
 
-        assert_eq!(
+        assert!(matches!(
             network.add_storage_node(
                 "my-node",
                 None,
@@ -1744,8 +1740,7 @@ mod tests {
                 None,
                 Some(10.0.into())
             ),
-            Err(PywrError::NodeNameAlreadyExists("my-node".to_string()))
-        );
+            Err(PywrError::NodeNameAlreadyExists(n)) if n == "my-node"));
     }
 
     #[test]
@@ -1762,10 +1757,10 @@
         node.set_max_flow_constraint(Some(parameter.into())).unwrap();
 
         // Try to assign a constraint not defined for particular node type
-        assert_eq!(
+        assert!(matches!(
             node.set_max_volume_constraint(Some(10.0.into())),
             Err(PywrError::StorageConstraintsUndefined)
-        );
+        ));
     }
 
     #[test]
diff --git a/pywr-core/src/recorders/hdf.rs b/pywr-core/src/recorders/hdf.rs
index 6cfb6d4..858e227 100644
--- a/pywr-core/src/recorders/hdf.rs
+++ b/pywr-core/src/recorders/hdf.rs
@@ -72,19 +72,14 @@ impl Recorder for HDF5Recorder {
         &self.meta
     }
 
     fn setup(&self, domain: &ModelDomain, network: &Network) -> Result<Option<Box<dyn Any>>, PywrError> {
-        let file = match hdf5_metno::File::create(&self.filename) {
-            Ok(f) => f,
-            Err(e) => return Err(PywrError::HDF5Error(e.to_string())),
-        };
+        let file = hdf5_metno::File::create(&self.filename)?;
 
         write_pywr_metadata(&file)?;
         write_scenarios_metadata(&file, domain.scenarios())?;
 
         // Create the time table
         let dates: Array1<_> = domain.time().timesteps().iter().map(DateTime::from_timestamp).collect();
-        if let Err(e) = file.deref().new_dataset_builder().with_data(&dates).create("time") {
-            return Err(PywrError::HDF5Error(e.to_string()));
-        }
+        file.deref().new_dataset_builder().with_data(&dates).create("time")?;
 
         let shape = (domain.time().len(), domain.scenarios().len());
@@ -130,9 +125,7 @@ impl Recorder for HDF5Recorder {
                 .map(|(_, s)| metric.get_value(model, s))
                 .collect::<Result<Vec<_>, _>>()?;
 
-            if let Err(e) = dataset.write_slice(&values, s![timestep.index, ..]) {
-                return Err(PywrError::HDF5Error(e.to_string()));
-            }
+            dataset.write_slice(&values, s![timestep.index, ..])?;
         }
 
         Ok(())
@@ -149,7 +142,7 @@
         match internal_state.take() {
             Some(internal) => {
                 if let Ok(internal) = internal.downcast::<Internal>() {
-                    internal.file.close().map_err(|e| PywrError::HDF5Error(e.to_string()))
+                    Ok(internal.file.close()?)
                 } else {
                     panic!("Internal state did not downcast to the correct type! :(");
                 }
@@ -160,11 +153,7 @@
 }
 
 fn require_dataset<S: Into<Extents>>(parent: &Group, shape: S, name: &str) -> Result<Dataset, PywrError> {
-    parent
-        .new_dataset::<f64>()
-        .shape(shape)
-        .create(name)
-        .map_err(|e| PywrError::HDF5Error(e.to_string()))
+    Ok(parent.new_dataset::<f64>().shape(shape).create(name)?)
 }
 
 /// Create a node dataset in /parent/name/sub_name/attribute
@@ -177,28 +166,22 @@ fn require_metric_dataset<S: Into<Extents>>(
     let ds = require_dataset(&grp, shape, metric.attribute())?;
 
     // Write the type and subtype as attributes
-    let ty =
-        hdf5_metno::types::VarLenUnicode::from_str(metric.ty()).map_err(|e| PywrError::HDF5Error(e.to_string()))?;
+    let ty = hdf5_metno::types::VarLenUnicode::from_str(metric.ty())
+        .map_err(|e| PywrError::HDF5VarLenUnicode(e.to_string()))?;
     let attr = ds
         .new_attr::<hdf5_metno::types::VarLenUnicode>()
         .shape(())
-        .create("pywr-type")
-        .map_err(|e| PywrError::HDF5Error(e.to_string()))?;
-    attr.as_writer()
-        .write_scalar(&ty)
-        .map_err(|e| PywrError::HDF5Error(e.to_string()))?;
+        .create("pywr-type")?;
+    attr.as_writer().write_scalar(&ty)?;
 
     if let Some(sub_type) = metric.sub_type() {
-        let sub_type =
-            hdf5_metno::types::VarLenUnicode::from_str(sub_type).map_err(|e| PywrError::HDF5Error(e.to_string()))?;
+        let sub_type = hdf5_metno::types::VarLenUnicode::from_str(sub_type)
+            .map_err(|e| PywrError::HDF5VarLenUnicode(e.to_string()))?;
         let attr = ds
             .new_attr::<hdf5_metno::types::VarLenUnicode>()
             .shape(())
-            .create("pywr-subtype")
-            .map_err(|e| PywrError::HDF5Error(e.to_string()))?;
-        attr.as_writer()
-            .write_scalar(&sub_type)
-            .map_err(|e| PywrError::HDF5Error(e.to_string()))?;
+            .create("pywr-subtype")?;
+        attr.as_writer().write_scalar(&sub_type)?;
     }
     Ok(ds)
 }
@@ -208,9 +191,7 @@ fn require_group(parent: &Group, name: &str) -> Result<Group, PywrError> {
         Ok(g) => Ok(g),
         Err(_) => {
             // Group could not be retrieved already, try to create it instead
-            parent
-                .create_group(name)
-                .map_err(|e| PywrError::HDF5Error(e.to_string()))
+            Ok(parent.create_group(name)?)
         }
     }
 }
@@ -220,16 +201,13 @@ fn write_pywr_metadata(file: &hdf5_metno::File) -> Result<(), PywrError> {
     const VERSION: &str = env!("CARGO_PKG_VERSION");
 
     let version =
-        hdf5_metno::types::VarLenUnicode::from_str(VERSION).map_err(|e| PywrError::HDF5Error(e.to_string()))?;
+        hdf5_metno::types::VarLenUnicode::from_str(VERSION).map_err(|e| PywrError::HDF5VarLenUnicode(e.to_string()))?;
 
     let attr = root
         .new_attr::<hdf5_metno::types::VarLenUnicode>()
         .shape(())
-        .create("pywr-version")
-        .map_err(|e| PywrError::HDF5Error(e.to_string()))?;
-    attr.as_writer()
-        .write_scalar(&version)
-        .map_err(|e| PywrError::HDF5Error(e.to_string()))?;
+        .create("pywr-version")?;
+    attr.as_writer().write_scalar(&version)?;
 
     Ok(())
 }
@@ -261,15 +239,13 @@ fn write_scenarios_metadata(file: &hdf5_metno::File, domain: &ScenarioDomain) ->
         .iter()
         .map(|s| {
             let name = hdf5_metno::types::VarLenUnicode::from_str(s.name())
-                .map_err(|e| PywrError::HDF5Error(e.to_string()))?;
+                .map_err(|e| PywrError::HDF5VarLenUnicode(e.to_string()))?;
             Ok(ScenarioGroupEntry { name, size: s.size() })
         })
        .collect::<Result<_, PywrError>>()?;
 
-    if let Err(e) = grp.new_dataset_builder().with_data(&scenario_groups).create("groups") {
-        return Err(PywrError::HDF5Error(e.to_string()));
-    }
+    grp.new_dataset_builder().with_data(&scenario_groups).create("groups")?;
 
     let scenarios: Array1<_> = domain
         .indices()
@@ -284,9 +260,7 @@ fn write_scenarios_metadata(file: &hdf5_metno::File, domain: &ScenarioDomain) ->
         })
        .collect::<Result<_, PywrError>>()?;
 
-    if let Err(e) = grp.new_dataset_builder().with_data(&scenarios).create("indices") {
-        return Err(PywrError::HDF5Error(e.to_string()));
-    }
+    grp.new_dataset_builder().with_data(&scenarios).create("indices")?;
 
     Ok(())
 }
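
Note on the pattern adopted above: giving the `HDF5Error` variant a `#[from] hdf5_metno::Error` field lets the `?` operator convert HDF5 errors automatically, which is what removes all the repeated `.map_err(|e| PywrError::HDF5Error(e.to_string()))` calls. Because the wrapped `hdf5_metno::Error` does not implement `PartialEq`, the enum can no longer derive `PartialEq, Eq`, and the tests switch from `assert_eq!` on whole error values to `assert!(matches!(...))` on variants. A minimal self-contained sketch of the same pattern follows; `DemoError` and `read_config` are hypothetical names for illustration only (nothing is assumed beyond the `thiserror` crate), not part of pywr.

use thiserror::Error;

// Hypothetical error type mirroring the PywrError change: `#[from]` generates
// a `From<std::io::Error>` impl, so `?` converts the source error in place.
#[derive(Error, Debug)]
enum DemoError {
    #[error("io error: {0}")]
    Io(#[from] std::io::Error),
    #[error("name already exists: {0}")]
    NameAlreadyExists(String),
}

// `?` turns a `std::io::Error` into `DemoError::Io` with no `map_err`.
fn read_config(path: &str) -> Result<String, DemoError> {
    Ok(std::fs::read_to_string(path)?)
}

fn main() {
    // A missing file surfaces as `DemoError::Io` via the `#[from]` conversion.
    match read_config("does-not-exist.toml") {
        Ok(s) => println!("read {} bytes", s.len()),
        Err(e) => println!("error: {e}"),
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn name_clash_is_reported() {
        let err: Result<(), DemoError> = Err(DemoError::NameAlreadyExists("my-node".to_string()));
        // Wrapping a non-PartialEq source error means the enum cannot derive
        // Eq, so tests match on the variant rather than using assert_eq!.
        assert!(matches!(err, Err(DemoError::NameAlreadyExists(n)) if n == "my-node"));
    }
}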