feat(pack+merge+convert): Allow user to provide a compression level; images are stored as-is by default, making the commands faster
gaku-sei committed Jan 8, 2024
1 parent 0dbd51f commit 175a84f
Showing 8 changed files with 69 additions and 4 deletions.
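At the core of the change, all three commands thread an optional `compression_level` through to the zip writer: when it is absent, entries are stored uncompressed, and when it is present, the given level (0-9) is used. A minimal sketch of that mapping, mirroring the pattern the eco-merge and eco-pack hunks below introduce:

```rust
use zip::{write::FileOptions, CompressionMethod};

// Mirrors the logic added in eco-merge and eco-pack: no level means the
// entries are stored as-is (fastest); otherwise the requested level (0-9)
// is passed to the zip writer's default compression method.
fn file_options_for(compression_level: Option<i32>) -> FileOptions {
    let mut file_options = FileOptions::default();
    if let Some(level) = compression_level {
        file_options = file_options.compression_level(Some(level));
    } else {
        file_options = file_options.compression_method(CompressionMethod::Stored);
    }
    file_options
}
```

Skipping deflate entirely is what makes the default path faster, at the cost of larger archives.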
2 changes: 2 additions & 0 deletions Cargo.lock

(Generated file; diff not rendered.)

13 changes: 13 additions & 0 deletions eco-cbz/src/cbz.rs
@@ -244,6 +244,19 @@ where
self.insert_with_extension_and_file_options(image, extension, FileOptions::default())
}

/// ## Errors
///
/// Same behavior as `insert_with_extension_and_file_options`
#[allow(clippy::missing_panics_doc)]
pub fn insert_with_file_options<R: BufRead + Seek>(
&mut self,
image: Image<R>,
file_options: FileOptions,
) -> Result<()> {
let extension = image.format().extensions_str().first().unwrap();
self.insert_with_extension_and_file_options(image, extension, file_options)
}

/// ## Errors
///
/// Same behavior as `insert_with_extension_and_file_options`
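For eco-cbz consumers, the new `insert_with_file_options` pairs an image with explicit zip `FileOptions` and derives the entry's extension from the image format (the `unwrap` is presumably safe because every supported format reports at least one extension, hence the `missing_panics_doc` allow). A rough call-site sketch; the `Image` and `Result` re-exports and the writer's backing type are assumptions, not confirmed by this diff:

```rust
use std::io::{BufRead, Cursor, Seek};

use eco_cbz::CbzWriter;
use zip::{write::FileOptions, CompressionMethod};

// Sketch only: `Image` and `Result` are assumed to be exported from eco_cbz,
// and Cursor<Vec<u8>> is assumed as the writer's backing type, as in eco-pack.
fn add_stored<R: BufRead + Seek>(
    writer: &mut CbzWriter<Cursor<Vec<u8>>>,
    image: eco_cbz::Image<R>,
) -> eco_cbz::Result<()> {
    // Store the entry as-is; the file extension comes from the image format.
    let opts = FileOptions::default().compression_method(CompressionMethod::Stored);
    writer.insert_with_file_options(image, opts)
}
```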
5 changes: 5 additions & 0 deletions eco-convert/src/lib.rs
@@ -53,6 +53,9 @@ pub struct ConvertOptions {

/// Reading order
pub reading_order: ReadingOrder,

/// If not provided, the images are stored as-is (fastest); the value must be between 0 and 9
pub compression_level: Option<i32>,
}

#[allow(clippy::missing_errors_doc)]
@@ -70,6 +73,7 @@ pub fn convert(opts: ConvertOptions) -> Result<()> {
opts.blur,
opts.autosplit,
opts.reading_order,
opts.compression_level,
)?
}
Format::Pdf => {
@@ -83,6 +87,7 @@ pub fn convert(opts: ConvertOptions) -> Result<()> {
opts.blur,
opts.autosplit,
opts.reading_order,
opts.compression_level,
)?
}
};
1 change: 1 addition & 0 deletions eco-merge/Cargo.toml
@@ -10,3 +10,4 @@ eco-cbz.workspace = true
glob.workspace = true
thiserror.workspace = true
tracing.workspace = true
zip.workspace = true
13 changes: 12 additions & 1 deletion eco-merge/src/lib.rs
@@ -4,6 +4,7 @@ use camino::Utf8PathBuf;
use eco_cbz::{CbzReader, CbzWriter};
use glob::glob;
use tracing::warn;
use zip::{write::FileOptions, CompressionMethod};

pub use crate::errors::{Error, Result};

@@ -19,12 +20,22 @@ pub struct MergeOptions {

/// The merged archive name
pub name: String,

/// If not provided, the images are stored as-is (fastest); the value must be between 0 and 9
pub compression_level: Option<i32>,
}

#[allow(clippy::missing_errors_doc, clippy::needless_pass_by_value)]
pub fn merge(opts: MergeOptions) -> Result<()> {
let mut merged_cbz_writer = CbzWriter::default();

let mut file_options = FileOptions::default();
if let Some(compression_level) = opts.compression_level {
file_options = file_options.compression_level(Some(compression_level));
} else {
file_options = file_options.compression_method(CompressionMethod::Stored);
}

for path in glob(&opts.archives_glob)? {
let mut current_cbz = CbzReader::try_from_path(path?)?;

@@ -36,7 +47,7 @@ pub fn merge(opts: MergeOptions) -> Result<()> {
return Ok::<(), Error>(());
}
};
-merged_cbz_writer.insert(image)?;
+merged_cbz_writer.insert_with_file_options(image, file_options)?;

Ok::<(), Error>(())
})?;
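From the library side, the new field is just one more member of `MergeOptions`. A rough call-site sketch; only `compression_level` is confirmed by the hunk above, and the remaining field types (notably `outdir`, taken here to be a `camino::Utf8PathBuf`) are assumptions:

```rust
use eco_merge::{merge, MergeOptions};

// Sketch under assumptions: field names match the destructuring in eco/src/main.rs,
// but `outdir`'s type (assumed path-like and convertible from &str) is not shown here.
fn run() -> eco_merge::Result<()> {
    merge(MergeOptions {
        archives_glob: "chapters/*.cbz".into(),
        outdir: "merged".into(),
        name: "volume-01".into(),
        // None => entries stored as-is (fastest); Some(0..=9) => deflate at that level.
        compression_level: Some(6),
    })
}
```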
1 change: 1 addition & 0 deletions eco-pack/Cargo.toml
@@ -11,3 +11,4 @@ glob.workspace = true
thiserror.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
zip.workspace = true
20 changes: 17 additions & 3 deletions eco-pack/src/lib.rs
@@ -13,6 +13,7 @@ use eco_cbz::{
};
use glob::glob;
use tracing::{debug, error};
use zip::{write::FileOptions, CompressionMethod};

pub use crate::errors::{Error, Result};

@@ -45,8 +46,17 @@ pub fn pack_imgs_to_cbz<R: BufRead + Seek>(
blur: Option<f32>,
autosplit: bool,
reading_order: ReadingOrder,
compression_level: Option<i32>,
) -> Result<CbzWriter<Cursor<Vec<u8>>>> {
let mut cbz_writer = CbzWriter::default();

let mut file_options = FileOptions::default();
if let Some(compression_level) = compression_level {
file_options = file_options.compression_level(Some(compression_level));
} else {
file_options = file_options.compression_method(CompressionMethod::Stored);
}

for mut img in imgs {
if let Some(contrast) = contrast {
img = img.set_contrast(contrast);
@@ -60,10 +70,10 @@
if autosplit && img.is_landscape() {
debug!("splitting landscape file");
let (img_left, img_right) = img.autosplit(reading_order);
-cbz_writer.insert(img_left)?;
-cbz_writer.insert(img_right)?;
+cbz_writer.insert_with_file_options(img_left, file_options)?;
+cbz_writer.insert_with_file_options(img_right, file_options)?;
} else {
-cbz_writer.insert(img)?;
+cbz_writer.insert_with_file_options(img, file_options)?;
}
}

@@ -95,6 +105,9 @@ pub struct PackOptions {

/// Reading order
pub reading_order: ReadingOrder,

/// If not provided, the images are stored as-is (fastest); the value must be between 0 and 9
pub compression_level: Option<i32>,
}

#[allow(clippy::missing_errors_doc)]
@@ -117,6 +130,7 @@ pub fn pack(opts: PackOptions) -> Result<()> {
opts.blur,
opts.autosplit,
opts.reading_order,
opts.compression_level,
)?;

cbz_writer.write_to_path(outdir.join(format!("{}.cbz", opts.name)))?;
18 changes: 18 additions & 0 deletions eco/src/main.rs
@@ -62,6 +62,10 @@ enum Command {
/// Reading order
#[clap(long, default_value_t = ReadingOrder::Rtl)]
reading_order: ReadingOrder,

/// If not provided, the images are stored as-is (fastest); the value must be between 0 and 9
#[clap(long)]
compression_level: Option<i32>,
},
Merge {
/// A glob that matches all the archives to merge
@@ -75,6 +79,10 @@ enum Command {
/// The merged archive name
#[clap(short, long)]
name: String,

/// If not provided, the images are stored as-is (fastest); the value must be between 0 and 9
#[clap(long)]
compression_level: Option<i32>,
},
Pack {
/// A glob that matches all the files to pack
@@ -107,6 +115,10 @@ enum Command {
/// Reading order
#[clap(long, default_value_t = ReadingOrder::Rtl)]
reading_order: ReadingOrder,

/// If not provided, the images are stored as-is (fastest); the value must be between 0 and 9
#[clap(long)]
compression_level: Option<i32>,
},
View {
/// The path to the e-book file to view
Expand All @@ -133,6 +145,7 @@ fn main() -> Result<()> {
blur,
autosplit,
reading_order,
compression_level,
} => eco_convert::convert(eco_convert::ConvertOptions {
path,
from: from.into(),
@@ -143,15 +156,18 @@ fn main() -> Result<()> {
blur,
autosplit,
reading_order: reading_order.into(),
compression_level,
})?,
Command::Merge {
archives_glob,
outdir,
name,
compression_level,
} => eco_merge::merge(eco_merge::MergeOptions {
archives_glob,
outdir,
name,
compression_level,
})?,
Command::Pack {
files_descriptor,
@@ -162,6 +178,7 @@
blur,
autosplit,
reading_order,
compression_level,
} => eco_pack::pack(eco_pack::PackOptions {
files_descriptor,
outdir,
@@ -171,6 +188,7 @@
blur,
autosplit,
reading_order: reading_order.into(),
compression_level,
})?,
Command::View { path, type_ } => eco_view::view(eco_view::ViewOptions {
path,
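Note that the CLI takes the level as a plain `Option<i32>`; the 0-9 bound mentioned in the help text is not enforced at parse time in this commit. A hypothetical variant, assuming a clap version with the `value_parser` API, that would reject out-of-range values up front:

```rust
use clap::Parser;

// Hypothetical follow-up, not part of this commit: have clap enforce the
// documented 0-9 range when parsing --compression-level.
#[derive(Parser)]
struct Opts {
    /// If not provided, the images are stored as-is (fastest)
    #[clap(long, value_parser = clap::value_parser!(i32).range(0..=9))]
    compression_level: Option<i32>,
}
```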
