diff --git a/Cargo.lock b/Cargo.lock
index 459879b..445c458 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1569,6 +1569,7 @@ dependencies = [
  "glob",
  "thiserror",
  "tracing",
+ "zip",
 ]

 [[package]]
@@ -1581,6 +1582,7 @@ dependencies = [
  "thiserror",
  "tracing",
  "tracing-subscriber",
+ "zip",
 ]

 [[package]]
diff --git a/eco-cbz/src/cbz.rs b/eco-cbz/src/cbz.rs
index 69ed245..7da2915 100644
--- a/eco-cbz/src/cbz.rs
+++ b/eco-cbz/src/cbz.rs
@@ -244,6 +244,19 @@ where
         self.insert_with_extension_and_file_options(image, extension, FileOptions::default())
     }

+    /// ## Errors
+    ///
+    /// Same behavior as `insert_with_extension_and_file_options`
+    #[allow(clippy::missing_panics_doc)]
+    pub fn insert_with_file_options(
+        &mut self,
+        image: Image,
+        file_options: FileOptions,
+    ) -> Result<()> {
+        let extension = image.format().extensions_str().first().unwrap();
+        self.insert_with_extension_and_file_options(image, extension, file_options)
+    }
+
     /// ## Errors
     ///
     /// Same behavior as `insert_with_extension_and_file_options`
diff --git a/eco-convert/src/lib.rs b/eco-convert/src/lib.rs
index 0a1ddef..a54d15c 100644
--- a/eco-convert/src/lib.rs
+++ b/eco-convert/src/lib.rs
@@ -53,6 +53,9 @@ pub struct ConvertOptions {

     /// Reading order
     pub reading_order: ReadingOrder,
+
+    /// If not provided, the images are stored as-is (fastest); the value must be between 0 and 9
+    pub compression_level: Option<i32>,
 }

 #[allow(clippy::missing_errors_doc)]
@@ -70,6 +73,7 @@ pub fn convert(opts: ConvertOptions) -> Result<()> {
                 opts.blur,
                 opts.autosplit,
                 opts.reading_order,
+                opts.compression_level,
             )?
         }
         Format::Pdf => {
@@ -83,6 +87,7 @@ pub fn convert(opts: ConvertOptions) -> Result<()> {
                 opts.blur,
                 opts.autosplit,
                 opts.reading_order,
+                opts.compression_level,
             )?
         }
     };
diff --git a/eco-merge/Cargo.toml b/eco-merge/Cargo.toml
index 3c4ce1d..1dee124 100644
--- a/eco-merge/Cargo.toml
+++ b/eco-merge/Cargo.toml
@@ -10,3 +10,4 @@ eco-cbz.workspace = true
 glob.workspace = true
 thiserror.workspace = true
 tracing.workspace = true
+zip.workspace = true
diff --git a/eco-merge/src/lib.rs b/eco-merge/src/lib.rs
index c81367b..20eedae 100644
--- a/eco-merge/src/lib.rs
+++ b/eco-merge/src/lib.rs
@@ -4,6 +4,7 @@ use camino::Utf8PathBuf;
 use eco_cbz::{CbzReader, CbzWriter};
 use glob::glob;
 use tracing::warn;
+use zip::{write::FileOptions, CompressionMethod};

 pub use crate::errors::{Error, Result};

@@ -19,12 +20,22 @@ pub struct MergeOptions {

     /// The merged archive name
     pub name: String,
+
+    /// If not provided, the images are stored as-is (fastest); the value must be between 0 and 9
+    pub compression_level: Option<i32>,
 }

 #[allow(clippy::missing_errors_doc, clippy::needless_pass_by_value)]
 pub fn merge(opts: MergeOptions) -> Result<()> {
     let mut merged_cbz_writer = CbzWriter::default();

+    let mut file_options = FileOptions::default();
+    if let Some(compression_level) = opts.compression_level {
+        file_options = file_options.compression_level(Some(compression_level));
+    } else {
+        file_options = file_options.compression_method(CompressionMethod::Stored);
+    }
+
     for path in glob(&opts.archives_glob)? {
         let mut current_cbz = CbzReader::try_from_path(path?)?;

@@ -36,7 +47,7 @@ pub fn merge(opts: MergeOptions) -> Result<()> {
                     return Ok::<(), Error>(());
                 }
             };
-            merged_cbz_writer.insert(image)?;
+            merged_cbz_writer.insert_with_file_options(image, file_options)?;

             Ok::<(), Error>(())
         })?;
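With the eco-cbz and eco-merge hunks above applied, callers build a single zip `FileOptions` value up front (Deflate at the requested level, or `Stored` when no level is given) and reuse it for every image through the new `insert_with_file_options`. A minimal sketch of that pattern; the helper name, the output path, and the `eco_cbz::Image` / `eco_cbz::Result` re-export paths are illustrative assumptions, everything else mirrors the diff:

use camino::Utf8PathBuf;
use eco_cbz::{CbzWriter, Image};
use zip::{write::FileOptions, CompressionMethod};

// Hypothetical helper mirroring merge(); not part of the PR, only an illustration.
fn write_archive(
    images: Vec<Image>,
    outdir: Utf8PathBuf,
    compression_level: Option<i32>,
) -> eco_cbz::Result<()> {
    // An explicit level keeps the default method (Deflate) at that level;
    // otherwise entries are stored uncompressed, which skips re-encoding.
    let mut file_options = FileOptions::default();
    if let Some(level) = compression_level {
        file_options = file_options.compression_level(Some(level));
    } else {
        file_options = file_options.compression_method(CompressionMethod::Stored);
    }

    let mut writer = CbzWriter::default();
    for image in images {
        // FileOptions is Copy, so the same options can be passed for every entry.
        writer.insert_with_file_options(image, file_options)?;
    }
    writer.write_to_path(outdir.join("merged.cbz"))?;
    Ok(())
}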
diff --git a/eco-pack/Cargo.toml b/eco-pack/Cargo.toml
index 739dde6..acb957c 100644
--- a/eco-pack/Cargo.toml
+++ b/eco-pack/Cargo.toml
@@ -11,3 +11,4 @@ glob.workspace = true
 thiserror.workspace = true
 tracing.workspace = true
 tracing-subscriber.workspace = true
+zip.workspace = true
diff --git a/eco-pack/src/lib.rs b/eco-pack/src/lib.rs
index fc2a0b9..134f6d1 100644
--- a/eco-pack/src/lib.rs
+++ b/eco-pack/src/lib.rs
@@ -13,6 +13,7 @@ use eco_cbz::{
 };
 use glob::glob;
 use tracing::{debug, error};
+use zip::{write::FileOptions, CompressionMethod};

 pub use crate::errors::{Error, Result};

@@ -45,8 +46,17 @@ pub fn pack_imgs_to_cbz(
     blur: Option<f32>,
     autosplit: bool,
     reading_order: ReadingOrder,
+    compression_level: Option<i32>,
 ) -> Result<CbzWriter<Cursor<Vec<u8>>>> {
     let mut cbz_writer = CbzWriter::default();
+
+    let mut file_options = FileOptions::default();
+    if let Some(compression_level) = compression_level {
+        file_options = file_options.compression_level(Some(compression_level));
+    } else {
+        file_options = file_options.compression_method(CompressionMethod::Stored);
+    }
+
     for mut img in imgs {
         if let Some(contrast) = contrast {
             img = img.set_contrast(contrast);
@@ -60,10 +70,10 @@ pub fn pack_imgs_to_cbz(
         if autosplit && img.is_landscape() {
             debug!("splitting landscape file");
             let (img_left, img_right) = img.autosplit(reading_order);
-            cbz_writer.insert(img_left)?;
-            cbz_writer.insert(img_right)?;
+            cbz_writer.insert_with_file_options(img_left, file_options)?;
+            cbz_writer.insert_with_file_options(img_right, file_options)?;
         } else {
-            cbz_writer.insert(img)?;
+            cbz_writer.insert_with_file_options(img, file_options)?;
         }
     }

@@ -95,6 +105,9 @@ pub struct PackOptions {

     /// Reading order
     pub reading_order: ReadingOrder,
+
+    /// If not provided, the images are stored as-is (fastest); the value must be between 0 and 9
+    pub compression_level: Option<i32>,
 }

 #[allow(clippy::missing_errors_doc)]
@@ -117,6 +130,7 @@ pub fn pack(opts: PackOptions) -> Result<()> {
         opts.blur,
         opts.autosplit,
         opts.reading_order,
+        opts.compression_level,
     )?;

     cbz_writer.write_to_path(outdir.join(format!("{}.cbz", opts.name)))?;
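The eco/src/main.rs hunks below only thread the same option through clap, exposing it as a `--compression-level` flag on the convert, merge, and pack subcommands. The flag's effect is visible only in how entries are encoded inside the produced archive; a small, self-contained way to check that with the zip crate's reader (the `test.cbz` path is an illustrative assumption, not part of the change):

use std::fs::File;
use zip::ZipArchive;

fn main() -> zip::result::ZipResult<()> {
    // Entries should report Stored when --compression-level was omitted and
    // Deflated when a level between 0 and 9 was passed.
    let mut archive = ZipArchive::new(File::open("test.cbz")?)?;
    for i in 0..archive.len() {
        let entry = archive.by_index(i)?;
        println!("{}: {:?}", entry.name(), entry.compression());
    }
    Ok(())
}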
diff --git a/eco/src/main.rs b/eco/src/main.rs
index 077c1b4..7cf7337 100644
--- a/eco/src/main.rs
+++ b/eco/src/main.rs
@@ -62,6 +62,10 @@ enum Command {
         /// Reading order
         #[clap(long, default_value_t = ReadingOrder::Rtl)]
         reading_order: ReadingOrder,
+
+        /// If not provided, the images are stored as-is (fastest); the value must be between 0 and 9
+        #[clap(long)]
+        compression_level: Option<i32>,
     },
     Merge {
         /// A glob that matches all the archive to merge
@@ -75,6 +79,10 @@ enum Command {
         /// The merged archive name
         #[clap(short, long)]
         name: String,
+
+        /// If not provided, the images are stored as-is (fastest); the value must be between 0 and 9
+        #[clap(long)]
+        compression_level: Option<i32>,
     },
     Pack {
         /// A glob that matches all the files to pack
@@ -107,6 +115,10 @@ enum Command {
         /// Reading order
         #[clap(long, default_value_t = ReadingOrder::Rtl)]
         reading_order: ReadingOrder,
+
+        /// If not provided, the images are stored as-is (fastest); the value must be between 0 and 9
+        #[clap(long)]
+        compression_level: Option<i32>,
     },
     View {
         /// The path to the e-book file to view
@@ -133,6 +145,7 @@ fn main() -> Result<()> {
             blur,
             autosplit,
             reading_order,
+            compression_level,
         } => eco_convert::convert(eco_convert::ConvertOptions {
             path,
             from: from.into(),
@@ -143,15 +156,18 @@ fn main() -> Result<()> {
             blur,
             autosplit,
             reading_order: reading_order.into(),
+            compression_level,
         })?,
         Command::Merge {
             archives_glob,
             outdir,
             name,
+            compression_level,
         } => eco_merge::merge(eco_merge::MergeOptions {
             archives_glob,
             outdir,
             name,
+            compression_level,
         })?,
         Command::Pack {
             files_descriptor,
@@ -162,6 +178,7 @@ fn main() -> Result<()> {
             blur,
             autosplit,
             reading_order,
+            compression_level,
         } => eco_pack::pack(eco_pack::PackOptions {
             files_descriptor,
             outdir,
@@ -171,6 +188,7 @@ fn main() -> Result<()> {
             blur,
             autosplit,
             reading_order: reading_order.into(),
+            compression_level,
         })?,
         Command::View { path, type_ } => eco_view::view(eco_view::ViewOptions {
             path,