diff --git a/Cargo.lock b/Cargo.lock index 3f11b58..4757729 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1435,13 +1435,28 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" +[[package]] +name = "eco" +version = "0.1.0" +dependencies = [ + "camino", + "clap", + "eco-cbz", + "eco-convert", + "eco-merge", + "eco-pack", + "eco-view", + "thiserror", + "tracing", + "tracing-subscriber", +] + [[package]] name = "eco-cbz" version = "0.1.0" dependencies = [ "camino", "chrono", - "clap", "image", "sanitize-filename", "serde", @@ -1453,12 +1468,10 @@ dependencies = [ ] [[package]] -name = "eco-converter" +name = "eco-convert" version = "0.1.0" dependencies = [ - "anyhow", "camino", - "clap", "eco-cbz", "eco-pack", "html5ever 0.26.0", @@ -1466,31 +1479,27 @@ dependencies = [ "markup5ever_rcdom", "mobi", "pdf", + "thiserror", "tl", "tracing", - "tracing-subscriber", ] [[package]] name = "eco-merge" version = "0.1.0" dependencies = [ - "anyhow", "camino", - "clap", "eco-cbz", "glob", + "thiserror", "tracing", - "tracing-subscriber", ] [[package]] name = "eco-pack" version = "0.1.0" dependencies = [ - "anyhow", "camino", - "clap", "eco-cbz", "glob", "thiserror", @@ -1499,13 +1508,11 @@ dependencies = [ ] [[package]] -name = "eco-viewer" +name = "eco-view" version = "0.1.0" dependencies = [ - "anyhow", "base64 0.21.5", "camino", - "clap", "dark-light", "dioxus", "dioxus-desktop", @@ -1517,7 +1524,6 @@ dependencies = [ "thiserror", "tl", "tracing", - "tracing-subscriber", "zip", ] diff --git a/Cargo.toml b/Cargo.toml index 6bfbd48..bd45353 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace] resolver = "2" -members = ["eco-cbz", "eco-converter", "eco-merge", "eco-pack", "eco-viewer"] +members = ["eco", "eco-cbz", "eco-convert", "eco-merge", "eco-pack", "eco-view"] [workspace.package] rust-version = "1.73.0" @@ -21,7 +21,10 @@ dioxus = "0.4.0" dioxus-desktop = "0.4.0" dunce = "1.0.4" eco-cbz = { path = "./eco-cbz" } +eco-convert = { path = "./eco-convert" } +eco-merge = { path = "./eco-merge" } eco-pack = { path = "./eco-pack" } +eco-view = { path = "./eco-view" } epub = "2.1.1" futures = "0.3.28" glob = "0.3.1" diff --git a/README.md b/README.md index 71aa408..9d2a434 100644 --- a/README.md +++ b/README.md @@ -4,17 +4,17 @@ The repository host multiple cli and gui that allows you to edit, convert, merge ## Tools (with supported format): -- `eco-converter` - cli - Convert e-books to any format (from pdf, mobi, and DRM-free azw3, to cbz only for now) -- `eco-merge` - cli - Merge e-books together when it makes sense (cbz) -- `eco-pack` - cli - pack images into an e-book file (cbz) -- `eco-viewer` - gui - A dead simple e-book reader (cbz) +- `eco convert` - cli - Convert e-books to any format (from pdf, mobi, and DRM-free azw3, to cbz only for now) +- `eco merge` - cli - Merge e-books together when it makes sense (cbz) +- `eco pack` - cli - pack images into an e-book file (cbz) +- `eco view` - gui - A dead simple e-book reader (cbz) ## Eco Converter Converts e-books from \* to \* (only pdf, mobi, and DRM-free azw3 to cbz supported for the moment): ```bash -eco-converter "archive.azw3" --from azw3 --outdir out +eco convert "archive.azw3" --from azw3 --outdir out ``` ## Eco Merge (cbz only for now) @@ -22,7 +22,7 @@ eco-converter "archive.azw3" --from azw3 --outdir out This will look for all the e-books in `path` and which file name contains `something` and merge them into 
`output/merged_archive.cbz`: ```bash -eco-merge --archives-glob "path/**/*something*" --outdir "output" --name "merged_archive" +eco merge --archives-glob "path/**/*something*" --outdir "output" --name "merged_archive" ``` ## Eco Pack (cbz only for now) @@ -30,7 +30,7 @@ eco-merge --archives-glob "path/**/*something*" --outdir "output" --name "merged Takes all the `png` files under `source` and pack them into the `archive.cbz` file: ```bash -eco-pack "source/*.png" --name archive --autosplit +eco pack "source/*.png" --name archive --autosplit ``` Options include: @@ -39,10 +39,10 @@ Options include: - `--contrast`: change contrast - `--brightness`: change brightness -## Eco Viewer (cbz only for now) +## Eco View (cbz only for now) View any e-book file with this simple gui: ```bash -eco-viewer "my_archive.cbz" +eco view "my_archive.cbz" ``` diff --git a/eco-cbz/Cargo.toml b/eco-cbz/Cargo.toml index f8e2a14..0f9975a 100644 --- a/eco-cbz/Cargo.toml +++ b/eco-cbz/Cargo.toml @@ -5,7 +5,6 @@ edition.workspace = true rust-version.workspace = true [dependencies] -clap = { workspace = true, optional = true } camino.workspace = true chrono = { workspace = true, features = ["serde"], optional = true } image = { workspace = true, features = ["webp-encoder"] } @@ -19,5 +18,4 @@ zip.workspace = true [features] default = [] -clap = ["dep:clap"] metadata = ["dep:chrono", "dep:serde", "dep:serde_json", "dep:serde_repr"] diff --git a/eco-cbz/src/cbz.rs b/eco-cbz/src/cbz.rs index 8f5f76c..acda2b4 100644 --- a/eco-cbz/src/cbz.rs +++ b/eco-cbz/src/cbz.rs @@ -1,5 +1,3 @@ -#![deny(clippy::all, clippy::pedantic)] - use std::{ fs::{File, OpenOptions}, io::{Cursor, Read, Seek, Write}, diff --git a/eco-cbz/src/cbz_metadata.rs b/eco-cbz/src/cbz_metadata.rs index 817e2ed..fe01c49 100644 --- a/eco-cbz/src/cbz_metadata.rs +++ b/eco-cbz/src/cbz_metadata.rs @@ -73,6 +73,7 @@ pub enum Month { } impl Month { + #[must_use] pub fn as_str(self) -> &'static str { match self { Self::Jan => "January", @@ -150,6 +151,7 @@ pub struct ComicBookInfoV1 { } impl ComicBookInfoV1 { + #[must_use] pub fn new() -> Self { Self::default() } @@ -264,6 +266,7 @@ pub struct UnofficialMetadata { } impl UnofficialMetadata { + #[must_use] pub fn new() -> Self { Self::default() } @@ -300,13 +303,8 @@ impl UnofficialMetadata { key: impl Into, value: impl Serialize, ) -> Result { - if self.extra.is_none() { - self.extra = Some(HashMap::new()); - } - self.extra - .as_mut() - .unwrap() - .insert(key.into(), serde_json::to_value(value)?); + let extra = self.extra.get_or_insert(HashMap::new()); + extra.insert(key.into(), serde_json::to_value(value)?); Ok(self) } diff --git a/eco-cbz/src/errors.rs b/eco-cbz/src/errors.rs index e2e36d8..ff85b05 100644 --- a/eco-cbz/src/errors.rs +++ b/eco-cbz/src/errors.rs @@ -1,49 +1,47 @@ -use std::{io, result}; - use zip::result::ZipError; #[derive(Debug, thiserror::Error)] pub enum Error { - #[error("IO error {0}")] - IO(#[from] io::Error), + #[error("io error {0}")] + IO(#[from] std::io::Error), - #[error("Zip error {0}")] + #[error("zip error {0}")] Zip(#[from] ZipError), - #[error("Cbz file size couldn't be converted")] + #[error("cbz file size couldn't be converted")] CbzFileSizeConversion, - #[error("Cbz file name is empty")] + #[error("cbz file name is empty")] CbzFileNameEmpty, - #[error("Cbz file invalid index {0}")] + #[error("cbz file invalid index {0}")] CbzFileInvalidIndex(String), - #[error("File at index {0} not found in cbz")] + #[error("file at index {0} not found in cbz")] CbzNotFound(usize), - 
#[error("Cbz is too large, it can contain a maximum of {0} files")] + #[error("cbz is too large, it can contain a maximum of {0} files")] CbzTooLarge(usize), - #[error("Cbz file insertion's extension not provided")] + #[error("cbz file insertion's extension not provided")] CbzInsertionNoExtension, - #[error("Cbz file insertion: no bytes set")] + #[error("cbz file insertion: no bytes set")] CbzInsertionNoBytes, - #[error("Cbz metadata is too large: {0} > 65,535")] + #[error("cbz metadata is too large: {0} > 65,535")] CbzMetadataSize(usize), - #[error("Image error: {0}")] + #[error("image error: {0}")] Image(#[from] image::ImageError), #[cfg(feature = "metadata")] - #[error("Metadata error: {0}")] + #[error("metadata error: {0}")] MetadataFormat(#[from] serde_json::Error), #[cfg(feature = "metadata")] - #[error("Metadata value error: {0}")] + #[error("metadata value error: {0}")] MetadataValue(String), } -pub type Result = result::Result; +pub type Result = std::result::Result; diff --git a/eco-cbz/src/image.rs b/eco-cbz/src/image.rs index ad7c9c9..0ed53a4 100644 --- a/eco-cbz/src/image.rs +++ b/eco-cbz/src/image.rs @@ -1,5 +1,4 @@ use std::{ - fmt::Display, io::{BufRead, Cursor, Read, Seek}, path::Path, }; @@ -10,25 +9,11 @@ use zip::read::ZipFile; use crate::errors::{Error, Result}; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] -#[cfg_attr(feature = "clap", derive(clap::ValueEnum))] pub enum ReadingOrder { Rtl, Ltr, } -impl Display for ReadingOrder { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "{}", - match self { - Self::Ltr => "ltr", - Self::Rtl => "rtl", - } - ) - } -} - #[derive(Debug, PartialEq)] pub struct Image { dynamic_image: DynamicImage, @@ -73,7 +58,9 @@ impl Image { }) } + #[allow(clippy::missing_errors_doc)] pub fn try_from_zip_file(mut file: ZipFile<'_>) -> Result { + #[allow(clippy::cast_possible_truncation)] let mut buf = Vec::with_capacity(file.size() as usize); file.read_to_end(&mut buf)?; @@ -153,6 +140,7 @@ impl Image { self } + #[allow(clippy::missing_errors_doc)] pub fn try_into_bytes(self) -> Result> { let mut buf = Cursor::new(Vec::new()); self.dynamic_image diff --git a/eco-cbz/src/lib.rs b/eco-cbz/src/lib.rs index a6c3ec9..eee1769 100644 --- a/eco-cbz/src/lib.rs +++ b/eco-cbz/src/lib.rs @@ -1,3 +1,5 @@ +#![deny(clippy::all, clippy::pedantic)] + pub mod cbz; pub mod cbz_metadata; pub mod errors; diff --git a/eco-converter/Cargo.toml b/eco-convert/Cargo.toml similarity index 78% rename from eco-converter/Cargo.toml rename to eco-convert/Cargo.toml index 11b03e1..18dab7e 100644 --- a/eco-converter/Cargo.toml +++ b/eco-convert/Cargo.toml @@ -1,13 +1,11 @@ [package] -name = "eco-converter" +name = "eco-convert" version = "0.1.0" edition.workspace = true rust-version.workspace = true [dependencies] -anyhow.workspace = true camino.workspace = true -clap.workspace = true eco-cbz.workspace = true eco-pack.workspace = true html5ever = { workspace = true, optional = true } @@ -15,9 +13,9 @@ image.workspace = true markup5ever_rcdom = { workspace = true, optional = true } mobi.workspace = true pdf.workspace = true +thiserror.workspace = true tl.workspace = true tracing.workspace = true -tracing-subscriber.workspace = true [features] default = [] diff --git a/eco-convert/src/errors.rs b/eco-convert/src/errors.rs new file mode 100644 index 0000000..8006e75 --- /dev/null +++ b/eco-convert/src/errors.rs @@ -0,0 +1,25 @@ +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("io error {0}")] + Io(#[from] 
std::io::Error), + + #[error("cbz error {0}")] + Cbz(#[from] eco_cbz::Error), + + #[error("mobi error {0}")] + Mobi(#[from] mobi::MobiError), + + #[error("pdf error {0}")] + Pdf(#[from] pdf::PdfError), + + #[error("ts parse error {0}")] + TlParse(#[from] tl::ParseError), + + #[error("pack error {0}")] + Pack(#[from] eco_pack::Error), + + #[error("invalid mobi version {0}")] + InvalidMobiVersion(u32), +} + +pub type Result = std::result::Result; diff --git a/eco-convert/src/lib.rs b/eco-convert/src/lib.rs new file mode 100644 index 0000000..3d345ec --- /dev/null +++ b/eco-convert/src/lib.rs @@ -0,0 +1,77 @@ +#![deny(clippy::all, clippy::pedantic)] + +use std::fs; + +use camino::Utf8PathBuf; +use eco_cbz::image::ReadingOrder; +use eco_pack::pack_imgs_to_cbz; +use tracing::info; + +pub use crate::errors::{Error, Result}; +pub use crate::mobi::convert_to_imgs as mobi_to_imgs; +pub use crate::pdf::convert_to_imgs as pdf_to_imgs; + +pub mod errors; +mod mobi; +mod pdf; +mod utils; + +#[derive(Debug, Clone, Copy)] +pub enum Format { + Mobi, + Azw3, + Pdf, +} + +#[derive(Debug)] +pub struct ConvertOptions { + /// Path to the source file + pub path: Utf8PathBuf, + + /// Source format + pub from: Format, + + /// Dir to output images + pub outdir: Utf8PathBuf, + + /// The archive name + pub name: String, + + /// Adjust images contrast + pub contrast: Option, + + /// Adjust images brightness + pub brightness: Option, + + /// Blur image (slow with big numbers) + pub blur: Option, + + /// Automatically split landscape images into 2 pages + pub autosplit: bool, + + /// Reading order + pub reading_order: ReadingOrder, +} + +#[allow(clippy::missing_errors_doc)] +pub fn convert(opts: ConvertOptions) -> Result<()> { + fs::create_dir_all(&opts.outdir)?; + let imgs = match opts.from { + Format::Mobi | Format::Azw3 => mobi_to_imgs(opts.path)?, + Format::Pdf => pdf_to_imgs(opts.path)?, + }; + info!("found {} imgs", imgs.len()); + + let cbz_writer = pack_imgs_to_cbz( + imgs, + opts.contrast, + opts.brightness, + opts.blur, + opts.autosplit, + opts.reading_order, + )?; + + cbz_writer.write_to_path(opts.outdir.join(format!("{}.cbz", opts.name)))?; + + Ok(()) +} diff --git a/eco-converter/src/mobi/html5ever_parser.rs b/eco-convert/src/mobi/html5ever_parser.rs similarity index 95% rename from eco-converter/src/mobi/html5ever_parser.rs rename to eco-convert/src/mobi/html5ever_parser.rs index 74dd649..8871a53 100644 --- a/eco-converter/src/mobi/html5ever_parser.rs +++ b/eco-convert/src/mobi/html5ever_parser.rs @@ -1,16 +1,16 @@ use std::{fs, io::BufReader, path::Path}; -use anyhow::Result; use eco_cbz::image::Image; use html5ever::{parse_document, tendril::TendrilSink, ParseOpts}; use markup5ever_rcdom::{Node, NodeData, RcDom}; use mobi::Mobi; use tracing::{error, warn}; -use crate::utils::base_32; +use crate::{utils::base_32, Result}; use super::MobiVersion; +#[allow(clippy::missing_errors_doc)] pub fn convert_to_imgs(path: impl AsRef) -> Result> { let mobi = Mobi::from_path(path)?; // Or is it `gen_version`? Both were equal in all the files I tested. 
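The new `eco-convert/src/lib.rs` above replaces the deleted binary with a library entry point (`ConvertOptions` plus `convert`). For orientation, here is a minimal sketch of how a caller can drive it; the field names and types come from the diff, while the concrete input path, output directory, and archive name are placeholder assumptions (the real wiring is in the new `eco` CLI further down). The caller also needs `camino` and `eco-cbz` as direct dependencies for `Utf8PathBuf` and `ReadingOrder`.

```rust
use camino::Utf8PathBuf;
use eco_cbz::image::ReadingOrder;
use eco_convert::{convert, ConvertOptions, Format};

fn main() -> eco_convert::Result<()> {
    // Placeholder inputs; the eco CLI fills these in from clap arguments.
    convert(ConvertOptions {
        path: Utf8PathBuf::from("archive.azw3"),
        from: Format::Azw3,
        outdir: Utf8PathBuf::from("out"),
        name: "archive".to_string(),
        contrast: None,
        brightness: None,
        blur: None,
        autosplit: false,
        reading_order: ReadingOrder::Rtl,
    })
}
```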
diff --git a/eco-converter/src/mobi/mod.rs b/eco-convert/src/mobi/mod.rs similarity index 80% rename from eco-converter/src/mobi/mod.rs rename to eco-convert/src/mobi/mod.rs index bb12b2f..b5cdcd2 100644 --- a/eco-converter/src/mobi/mod.rs +++ b/eco-convert/src/mobi/mod.rs @@ -3,6 +3,8 @@ pub use html5ever_parser::convert_to_imgs; #[cfg(not(feature = "html5ever"))] pub use tl_parser::convert_to_imgs; +use crate::Error; + #[cfg(feature = "html5ever")] mod html5ever_parser; #[cfg(not(feature = "html5ever"))] @@ -15,13 +17,13 @@ enum MobiVersion { } impl TryFrom for MobiVersion { - type Error = anyhow::Error; + type Error = crate::Error; fn try_from(version: u32) -> std::result::Result { match version { 6 => Ok(Self::Mobi6), 8 => Ok(Self::Mobi8), - _ => anyhow::bail!("invalid version {version}"), + _ => Err(Error::InvalidMobiVersion(version)), } } } diff --git a/eco-converter/src/mobi/tl_parser.rs b/eco-convert/src/mobi/tl_parser.rs similarity index 95% rename from eco-converter/src/mobi/tl_parser.rs rename to eco-convert/src/mobi/tl_parser.rs index 3cbc275..283ed4b 100644 --- a/eco-converter/src/mobi/tl_parser.rs +++ b/eco-convert/src/mobi/tl_parser.rs @@ -1,15 +1,15 @@ use std::path::Path; -use anyhow::Result; use eco_cbz::image::Image; use mobi::Mobi; use tl::{HTMLTag, ParserOptions, VDom}; use tracing::{debug, error, warn}; -use crate::utils::base_32; +use crate::{utils::base_32, Result}; use super::MobiVersion; +#[allow(clippy::missing_errors_doc)] pub fn convert_to_imgs(path: impl AsRef) -> Result> { let mobi = Mobi::from_path(path)?; // Or is it `gen_version`? Both were equal in all the files I tested. diff --git a/eco-converter/src/pdf.rs b/eco-convert/src/pdf.rs similarity index 94% rename from eco-converter/src/pdf.rs rename to eco-convert/src/pdf.rs index 9902f50..c45873d 100644 --- a/eco-converter/src/pdf.rs +++ b/eco-convert/src/pdf.rs @@ -1,6 +1,5 @@ use std::{io::Cursor, path::Path}; -use anyhow::Result; use eco_cbz::image::Image; use pdf::{ enc::StreamFilter, @@ -9,6 +8,9 @@ use pdf::{ }; use tracing::error; +use crate::Result; + +#[allow(clippy::missing_errors_doc)] pub fn convert_to_imgs(path: impl AsRef) -> Result> { let pdf = PdfFileOptions::cached().open(path)?; // We may have actually less images than the count but never more, diff --git a/eco-converter/src/utils.rs b/eco-convert/src/utils.rs similarity index 100% rename from eco-converter/src/utils.rs rename to eco-convert/src/utils.rs diff --git a/eco-converter/src/main.rs b/eco-converter/src/main.rs deleted file mode 100644 index a8bd821..0000000 --- a/eco-converter/src/main.rs +++ /dev/null @@ -1,80 +0,0 @@ -#![deny(clippy::all, clippy::pedantic)] - -use std::fs; - -use anyhow::Result; -use camino::Utf8PathBuf; -use clap::{Parser, ValueEnum}; -use eco_cbz::image::ReadingOrder; -use eco_pack::pack_imgs_to_cbz; -use tracing::info; - -use crate::mobi::convert_to_imgs as mobi_to_imgs; -use crate::pdf::convert_to_imgs as pdf_to_imgs; - -mod mobi; -mod pdf; -mod utils; - -#[derive(Debug, Clone, Copy, ValueEnum)] -enum Format { - Mobi, - Azw3, - Pdf, -} - -#[derive(Debug, Parser)] -#[command(author, version, about, long_about = None)] -struct Args { - /// Path to the source file - path: Utf8PathBuf, - /// Source format - #[clap(long, short)] - from: Format, - /// Dir to output images - #[clap(long, short)] - outdir: Utf8PathBuf, - /// The archive name - #[clap(long, short)] - name: String, - /// Adjust images contrast - #[clap(long)] - contrast: Option, - /// Adjust images brightness - #[clap(long)] - brightness: Option, - 
/// Blur image (slow with big numbers) - #[clap(long)] - blur: Option, - /// Automatically split landscape images into 2 pages - #[clap(long, action)] - autosplit: bool, - /// Reading order - #[clap(long, default_value_t = ReadingOrder::Rtl)] - reading_order: ReadingOrder, -} - -fn main() -> Result<()> { - tracing_subscriber::fmt::init(); - - let args = Args::parse(); - fs::create_dir_all(&args.outdir)?; - let imgs = match args.from { - Format::Mobi | Format::Azw3 => mobi_to_imgs(args.path)?, - Format::Pdf => pdf_to_imgs(args.path)?, - }; - info!("found {} imgs", imgs.len()); - - let cbz_writer = pack_imgs_to_cbz( - imgs, - args.contrast, - args.brightness, - args.blur, - args.autosplit, - args.reading_order, - )?; - - cbz_writer.write_to_path(args.outdir.join(format!("{}.cbz", args.name)))?; - - Ok(()) -} diff --git a/eco-merge/Cargo.toml b/eco-merge/Cargo.toml index 9b8a6eb..3c4ce1d 100644 --- a/eco-merge/Cargo.toml +++ b/eco-merge/Cargo.toml @@ -5,10 +5,8 @@ edition.workspace = true rust-version.workspace = true [dependencies] -anyhow.workspace = true camino.workspace = true -clap.workspace = true eco-cbz.workspace = true glob.workspace = true +thiserror.workspace = true tracing.workspace = true -tracing-subscriber.workspace = true diff --git a/eco-merge/src/errors.rs b/eco-merge/src/errors.rs new file mode 100644 index 0000000..470fdde --- /dev/null +++ b/eco-merge/src/errors.rs @@ -0,0 +1,13 @@ +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("cbz error {0}")] + Cbz(#[from] eco_cbz::Error), + + #[error("glob error {0}")] + Glob(#[from] glob::GlobError), + + #[error("glob pattern error {0}")] + GlobPattern(#[from] glob::PatternError), +} + +pub type Result = std::result::Result; diff --git a/eco-merge/src/main.rs b/eco-merge/src/lib.rs similarity index 58% rename from eco-merge/src/main.rs rename to eco-merge/src/lib.rs index d644d61..c81367b 100644 --- a/eco-merge/src/main.rs +++ b/eco-merge/src/lib.rs @@ -1,34 +1,31 @@ #![deny(clippy::all, clippy::pedantic)] -use anyhow::Result; use camino::Utf8PathBuf; -use clap::Parser; use eco_cbz::{CbzReader, CbzWriter}; use glob::glob; use tracing::warn; -#[derive(Parser, Debug)] -#[clap(about, author, version)] -pub struct Args { +pub use crate::errors::{Error, Result}; + +pub mod errors; + +#[derive(Debug)] +pub struct MergeOptions { /// A glob that matches all the archive to merge - #[clap(short, long)] pub archives_glob: String, + /// The output directory for the merged archive - #[clap(short, long)] pub outdir: Utf8PathBuf, + /// The merged archive name - #[clap(short, long)] pub name: String, } -fn main() -> Result<()> { - tracing_subscriber::fmt::init(); - - let args = Args::parse(); - +#[allow(clippy::missing_errors_doc, clippy::needless_pass_by_value)] +pub fn merge(opts: MergeOptions) -> Result<()> { let mut merged_cbz_writer = CbzWriter::default(); - for path in glob(&args.archives_glob)? { + for path in glob(&opts.archives_glob)? 
{ let mut current_cbz = CbzReader::try_from_path(path?)?; current_cbz.try_for_each(|image| { @@ -36,16 +33,16 @@ fn main() -> Result<()> { Ok(image) => image, Err(err) => { warn!("not a valid image: {err}"); - return Ok::<(), anyhow::Error>(()); + return Ok::<(), Error>(()); } }; merged_cbz_writer.insert(image)?; - Ok::<(), anyhow::Error>(()) + Ok::<(), Error>(()) })?; } - merged_cbz_writer.write_to_path(args.outdir.join(format!("{}.cbz", args.name)))?; + merged_cbz_writer.write_to_path(opts.outdir.join(format!("{}.cbz", opts.name)))?; Ok(()) } diff --git a/eco-pack/Cargo.toml b/eco-pack/Cargo.toml index 68822ec..739dde6 100644 --- a/eco-pack/Cargo.toml +++ b/eco-pack/Cargo.toml @@ -5,10 +5,8 @@ edition.workspace = true rust-version.workspace = true [dependencies] -anyhow.workspace = true camino.workspace = true -clap.workspace = true -eco-cbz = { workspace = true, features = ["clap", "metadata"] } +eco-cbz = { workspace = true, features = ["metadata"] } glob.workspace = true thiserror.workspace = true tracing.workspace = true diff --git a/eco-pack/src/errors.rs b/eco-pack/src/errors.rs index 0249d2f..714644c 100644 --- a/eco-pack/src/errors.rs +++ b/eco-pack/src/errors.rs @@ -3,14 +3,17 @@ pub enum Error { #[error("error: {0}")] Generic(String), - #[error("Glob error: {0}")] + #[error("glob error: {0}")] Glob(#[from] glob::GlobError), - #[error("Glob pattern error: {0}")] + #[error("glob pattern error: {0}")] GlobPattern(#[from] glob::PatternError), - #[error("Cbz error: {0}")] - Image(#[from] eco_cbz::Error), + #[error("cbz error: {0}")] + Cbz(#[from] eco_cbz::Error), + + #[error("io error: {0}")] + Io(#[from] std::io::Error), } pub type Result = std::result::Result; diff --git a/eco-pack/src/lib.rs b/eco-pack/src/lib.rs index 2344863..5a40010 100644 --- a/eco-pack/src/lib.rs +++ b/eco-pack/src/lib.rs @@ -1,8 +1,8 @@ #![deny(clippy::all, clippy::pedantic)] -use std::io::Cursor; +use std::{env, fs::create_dir_all, io::Cursor}; -use camino::Utf8Path; +use camino::{Utf8Path, Utf8PathBuf}; use eco_cbz::{ image::{Image, ReadingOrder}, CbzWriter, @@ -10,7 +10,7 @@ use eco_cbz::{ use glob::glob; use tracing::{debug, error}; -use crate::errors::Result; +pub use crate::errors::{Error, Result}; pub mod errors; @@ -66,3 +66,57 @@ pub fn pack_imgs_to_cbz( Ok(cbz_writer) } + +#[derive(Debug)] +pub struct PackOptions { + /// A glob that matches all the files to pack + pub files_descriptor: String, + + /// The output directory for the merged archive + pub outdir: Utf8PathBuf, + + /// The merged archive name + pub name: String, + + /// Adjust images contrast + pub contrast: Option, + + /// Adjust images brightness + pub brightness: Option, + + /// Blur image (slow with big numbers) + pub blur: Option, + + /// Automatically split landscape images into 2 pages + pub autosplit: bool, + + /// Reading order + pub reading_order: ReadingOrder, +} + +#[allow(clippy::missing_errors_doc)] +pub fn pack(opts: PackOptions) -> Result<()> { + let Ok(current_dir) = Utf8PathBuf::from_path_buf(env::current_dir()?) 
else { + return Err(Error::Generic( + "current dir is not a valid utf8 path".to_string(), + )); + }; + let outdir = current_dir.join(&opts.outdir); + if !outdir.exists() { + create_dir_all(&*outdir)?; + } + let imgs = get_images_from_glob(opts.files_descriptor)?; + + let cbz_writer = pack_imgs_to_cbz( + imgs, + opts.contrast, + opts.brightness, + opts.blur, + opts.autosplit, + opts.reading_order, + )?; + + cbz_writer.write_to_path(outdir.join(format!("{}.cbz", opts.name)))?; + + Ok(()) +} diff --git a/eco-pack/src/main.rs b/eco-pack/src/main.rs deleted file mode 100644 index 6628392..0000000 --- a/eco-pack/src/main.rs +++ /dev/null @@ -1,64 +0,0 @@ -#![deny(clippy::all, clippy::pedantic)] - -use std::{env, fs::create_dir}; - -use anyhow::{bail, Result}; -use camino::Utf8PathBuf; -use clap::Parser; -use eco_cbz::image::ReadingOrder; -use eco_pack::{get_images_from_glob, pack_imgs_to_cbz}; - -#[derive(Parser, Debug)] -#[clap(about, author, version)] -pub struct Args { - /// A glob that matches all the files to pack - files_descriptor: String, - /// The output directory for the merged archive - #[clap(short, long, default_value = "./")] - outdir: Utf8PathBuf, - /// The merged archive name - #[clap(short, long)] - name: String, - /// Adjust images contrast - #[clap(long)] - contrast: Option, - /// Adjust images brightness - #[clap(long)] - brightness: Option, - /// Blur image (slow with big numbers) - #[clap(long)] - blur: Option, - /// Automatically split landscape images into 2 pages - #[clap(long, action)] - autosplit: bool, - /// Reading order - #[clap(long, default_value_t = ReadingOrder::Rtl)] - reading_order: ReadingOrder, -} - -fn main() -> Result<()> { - tracing_subscriber::fmt::init(); - - let args = Args::parse(); - let Ok(current_dir) = Utf8PathBuf::from_path_buf(env::current_dir()?) 
else { - bail!("current dir is not a valid utf-8 path"); - }; - let outdir = current_dir.join(&args.outdir); - if !outdir.exists() { - create_dir(&*outdir)?; - } - let imgs = get_images_from_glob(args.files_descriptor)?; - - let cbz_writer = pack_imgs_to_cbz( - imgs, - args.contrast, - args.brightness, - args.blur, - args.autosplit, - args.reading_order, - )?; - - cbz_writer.write_to_path(outdir.join(format!("{}.cbz", args.name)))?; - - Ok(()) -} diff --git a/eco-viewer/Cargo.toml b/eco-view/Cargo.toml similarity index 76% rename from eco-viewer/Cargo.toml rename to eco-view/Cargo.toml index 9f73998..76d9305 100644 --- a/eco-viewer/Cargo.toml +++ b/eco-view/Cargo.toml @@ -1,14 +1,12 @@ [package] -name = "eco-viewer" +name = "eco-view" version = "0.1.0" edition.workspace = true rust-version.workspace = true [dependencies] -anyhow.workspace = true base64.workspace = true camino.workspace = true -clap = { workspace = true, features = ["derive"] } dark-light.workspace = true dioxus.workspace = true dioxus-desktop.workspace = true @@ -20,5 +18,4 @@ iced = { workspace = true, features = ["image"] } thiserror.workspace = true tl.workspace = true tracing.workspace = true -tracing-subscriber.workspace = true zip.workspace = true diff --git a/eco-viewer/src/components/doc_page.rs b/eco-view/src/components/doc_page.rs similarity index 100% rename from eco-viewer/src/components/doc_page.rs rename to eco-view/src/components/doc_page.rs diff --git a/eco-viewer/src/components/mod.rs b/eco-view/src/components/mod.rs similarity index 100% rename from eco-viewer/src/components/mod.rs rename to eco-view/src/components/mod.rs diff --git a/eco-viewer/src/doc.rs b/eco-view/src/doc.rs similarity index 94% rename from eco-viewer/src/doc.rs rename to eco-view/src/doc.rs index 8e7e2a6..b1bd604 100644 --- a/eco-viewer/src/doc.rs +++ b/eco-view/src/doc.rs @@ -7,18 +7,15 @@ use std::{ use base64::Engine; use camino::Utf8Path; -use clap::ValueEnum; use eco_cbz::CbzReader; use tl::{HTMLTag, ParserOptions, VDom}; use tracing::debug; use crate::errors::{Error, Result}; -#[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum FileType { - #[clap(name = "cbz")] Cbz, - #[clap(skip, name = "epub")] EPub, } diff --git a/eco-viewer/src/errors.rs b/eco-view/src/errors.rs similarity index 76% rename from eco-viewer/src/errors.rs rename to eco-view/src/errors.rs index b7a6636..a195be0 100644 --- a/eco-viewer/src/errors.rs +++ b/eco-view/src/errors.rs @@ -4,7 +4,7 @@ pub enum Error { Io(#[from] std::io::Error), #[error("cbz error: {0}")] - Cbz(#[from] eco_cbz::errors::Error), + Cbz(#[from] eco_cbz::Error), #[error("epub doc error: {0}")] EpubDoc(#[from] epub::doc::DocError), @@ -23,6 +23,12 @@ pub enum Error { #[error("page not found: {0}")] PageNotFound(usize), + + #[error("invalid non utf8 path provided")] + InvalidNonUtf8Path, + + #[error("unknown file type provided")] + UnknownFileType, } pub type Result = std::result::Result; diff --git a/eco-viewer/src/lib.rs b/eco-view/src/lib.rs similarity index 89% rename from eco-viewer/src/lib.rs rename to eco-view/src/lib.rs index ff52768..7995bf1 100644 --- a/eco-viewer/src/lib.rs +++ b/eco-view/src/lib.rs @@ -4,7 +4,7 @@ use std::{cell::Cell, thread}; -use camino::Utf8Path; +use camino::Utf8PathBuf; use dioxus::{ html::{geometry::WheelDelta, input_data::keyboard_types::Key}, prelude::*, @@ -17,11 +17,11 @@ use tracing::{debug, error}; use crate::components::doc_page::DocPage; pub use crate::doc::FileType; use 
crate::doc::SharedDoc; -use crate::errors::Result; +pub use crate::errors::{Error, Result}; mod components; mod doc; -mod errors; +pub mod errors; mod measure; fn load_pages( @@ -49,6 +49,15 @@ fn load_pages( }); } +#[derive(Debug)] +pub struct ViewOptions { + /// The path to the e-book file to view + pub path: Utf8PathBuf, + + /// Type of the file + pub type_: Option, +} + /// Starts a new window with the viewer inside /// /// ## Errors @@ -56,9 +65,19 @@ fn load_pages( /// Fails on file read error /// /// ## Panics -pub fn run(path: impl AsRef, type_: FileType) -> Result<()> { +pub fn view(opts: ViewOptions) -> Result<()> { + let Ok(path) = Utf8PathBuf::try_from(dunce::canonicalize(opts.path)?) else { + return Err(Error::InvalidNonUtf8Path); + }; + let Some(file_type) = opts + .type_ + .or_else(|| path.extension().and_then(|ext| ext.parse().ok())) + else { + return Err(Error::UnknownFileType); + }; + let path = path.as_ref(); - let (max_page, doc) = try_load_shared_doc_from_path(type_, path)?; + let (max_page, doc) = try_load_shared_doc_from_path(file_type, path)?; let (page_loaded_sender, page_loaded_receiver) = mpsc::unbounded::<()>(); let measure = crate::measure::Measure::new("total document loading time", crate::measure::Precision::Ms); diff --git a/eco-viewer/src/measure.rs b/eco-view/src/measure.rs similarity index 100% rename from eco-viewer/src/measure.rs rename to eco-view/src/measure.rs diff --git a/eco-viewer/src/main.rs b/eco-viewer/src/main.rs deleted file mode 100644 index 19094f2..0000000 --- a/eco-viewer/src/main.rs +++ /dev/null @@ -1,33 +0,0 @@ -#![deny(clippy::all, clippy::pedantic)] - -use anyhow::{bail, Result}; -use camino::Utf8PathBuf; -use clap::Parser; -use eco_viewer::FileType; - -#[derive(Debug, Parser)] -#[clap(about, author, version)] -pub struct Args { - /// The path to the e-book file to view - pub path: Utf8PathBuf, - - /// Type of the file - #[clap(long = "type")] - pub type_: Option, -} - -fn main() -> Result<()> { - tracing_subscriber::fmt::init(); - let args = Args::parse(); - let path = Utf8PathBuf::try_from(dunce::canonicalize(args.path)?)?; - let Some(file_type) = args - .type_ - .or_else(|| path.extension().and_then(|ext| ext.parse().ok())) - else { - bail!("unknown file type"); - }; - - eco_viewer::run(path, file_type)?; - - Ok(()) -} diff --git a/eco/Cargo.toml b/eco/Cargo.toml new file mode 100644 index 0000000..c70c985 --- /dev/null +++ b/eco/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "eco" +version = "0.1.0" +edition.workspace = true +rust-version.workspace = true + +[dependencies] +camino.workspace = true +clap.workspace = true +eco-cbz.workspace = true +eco-convert.workspace = true +eco-merge.workspace = true +eco-pack.workspace = true +eco-view.workspace = true +thiserror.workspace = true +tracing.workspace = true +tracing-subscriber.workspace = true diff --git a/eco/src/errors.rs b/eco/src/errors.rs new file mode 100644 index 0000000..f27533e --- /dev/null +++ b/eco/src/errors.rs @@ -0,0 +1,16 @@ +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("convert error {0}")] + Convert(#[from] eco_convert::Error), + + #[error("merge error {0}")] + Merge(#[from] eco_merge::Error), + + #[error("pack error {0}")] + Pack(#[from] eco_pack::Error), + + #[error("view error {0}")] + View(#[from] eco_view::Error), +} + +pub type Result = std::result::Result; diff --git a/eco/src/main.rs b/eco/src/main.rs new file mode 100644 index 0000000..077c1b4 --- /dev/null +++ b/eco/src/main.rs @@ -0,0 +1,182 @@ +#![deny(clippy::all, 
clippy::pedantic)] + +use camino::Utf8PathBuf; +use clap::{Parser, Subcommand}; +use types::FileType; + +use crate::errors::Result; +use crate::types::{Format, ReadingOrder}; + +mod errors; +mod types; + +#[derive(Debug, Parser)] +#[clap(name = "eco", author, version, about, long_about = None)] +struct Args { + #[clap(flatten)] + global_opts: GlobalOpts, + + #[clap(subcommand)] + command: Command, +} + +#[derive(Debug, clap::Args)] +struct GlobalOpts { + verbose: bool, +} + +#[derive(Debug, Subcommand)] +enum Command { + Convert { + /// Path to the source file + path: Utf8PathBuf, + + /// Source format + #[clap(long, short)] + from: Format, + + /// Dir to output images + #[clap(long, short)] + outdir: Utf8PathBuf, + + /// The archive name + #[clap(long, short)] + name: String, + + /// Adjust images contrast + #[clap(long)] + contrast: Option, + + /// Adjust images brightness + #[clap(long)] + brightness: Option, + + /// Blur image (slow with big numbers) + #[clap(long)] + blur: Option, + + /// Automatically split landscape images into 2 pages + #[clap(long, action)] + autosplit: bool, + + /// Reading order + #[clap(long, default_value_t = ReadingOrder::Rtl)] + reading_order: ReadingOrder, + }, + Merge { + /// A glob that matches all the archive to merge + #[clap(short, long)] + archives_glob: String, + + /// The output directory for the merged archive + #[clap(short, long)] + outdir: Utf8PathBuf, + + /// The merged archive name + #[clap(short, long)] + name: String, + }, + Pack { + /// A glob that matches all the files to pack + files_descriptor: String, + + /// The output directory for the merged archive + #[clap(short, long, default_value = "./")] + outdir: Utf8PathBuf, + + /// The merged archive name + #[clap(short, long)] + name: String, + + /// Adjust images contrast + #[clap(long)] + contrast: Option, + + /// Adjust images brightness + #[clap(long)] + brightness: Option, + + /// Blur image (slow with big numbers) + #[clap(long)] + blur: Option, + + /// Automatically split landscape images into 2 pages + #[clap(long, action)] + autosplit: bool, + + /// Reading order + #[clap(long, default_value_t = ReadingOrder::Rtl)] + reading_order: ReadingOrder, + }, + View { + /// The path to the e-book file to view + path: Utf8PathBuf, + + /// Type of the file + #[clap(long = "type")] + type_: Option, + }, +} + +fn main() -> Result<()> { + tracing_subscriber::fmt::init(); + let args = Args::parse(); + + match args.command { + Command::Convert { + path, + from, + outdir, + name, + contrast, + brightness, + blur, + autosplit, + reading_order, + } => eco_convert::convert(eco_convert::ConvertOptions { + path, + from: from.into(), + outdir, + name, + contrast, + brightness, + blur, + autosplit, + reading_order: reading_order.into(), + })?, + Command::Merge { + archives_glob, + outdir, + name, + } => eco_merge::merge(eco_merge::MergeOptions { + archives_glob, + outdir, + name, + })?, + Command::Pack { + files_descriptor, + outdir, + name, + contrast, + brightness, + blur, + autosplit, + reading_order, + } => eco_pack::pack(eco_pack::PackOptions { + files_descriptor, + outdir, + name, + contrast, + brightness, + blur, + autosplit, + reading_order: reading_order.into(), + })?, + Command::View { path, type_ } => eco_view::view(eco_view::ViewOptions { + path, + type_: type_.map(Into::into), + })?, + } + + Ok(()) +} diff --git a/eco/src/types.rs b/eco/src/types.rs new file mode 100644 index 0000000..a23d43f --- /dev/null +++ b/eco/src/types.rs @@ -0,0 +1,66 @@ +use std::fmt::Display; + +use clap::ValueEnum; 
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, ValueEnum)]
+pub enum ReadingOrder {
+    Rtl,
+    Ltr,
+}
+
+impl From<ReadingOrder> for eco_cbz::ReadingOrder {
+    fn from(value: ReadingOrder) -> Self {
+        match value {
+            ReadingOrder::Ltr => Self::Ltr,
+            ReadingOrder::Rtl => Self::Rtl,
+        }
+    }
+}
+
+impl Display for ReadingOrder {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "{}",
+            match self {
+                Self::Ltr => "ltr",
+                Self::Rtl => "rtl",
+            }
+        )
+    }
+}
+
+// TODO: Format and FileType can, and should, be merged together, but the underlying should support them
+#[derive(Debug, Clone, Copy, ValueEnum)]
+pub enum Format {
+    Mobi,
+    Azw3,
+    Pdf,
+}
+
+impl From<Format> for eco_convert::Format {
+    fn from(value: Format) -> Self {
+        match value {
+            Format::Azw3 => Self::Azw3,
+            Format::Mobi => Self::Mobi,
+            Format::Pdf => Self::Pdf,
+        }
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, ValueEnum)]
+pub enum FileType {
+    #[clap(name = "cbz")]
+    Cbz,
+    #[clap(skip, name = "epub")]
+    EPub,
+}
+
+impl From<FileType> for eco_view::FileType {
+    fn from(value: FileType) -> Self {
+        match value {
+            FileType::Cbz => Self::Cbz,
+            FileType::EPub => Self::EPub,
+        }
+    }
+}
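One thing `eco_view::view` above relies on but this diff does not show: resolving the file type from the extension via `path.extension().and_then(|ext| ext.parse().ok())` requires a `FromStr` impl for `FileType`, which presumably already exists in `eco-view/src/doc.rs` (the deleted `eco-viewer/src/main.rs` used the same `ext.parse()` pattern). The sketch below is only a hypothetical illustration of the shape such an impl takes, assuming case-insensitive matching on the `cbz` and `epub` extensions; it is not the repository's actual code.

```rust
use std::str::FromStr;

// Hypothetical stand-in for eco-view's FileType; the real enum lives in eco-view/src/doc.rs.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FileType {
    Cbz,
    EPub,
}

impl FromStr for FileType {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Lowercase the extension so "CBZ" and "cbz" resolve to the same variant.
        match s.to_ascii_lowercase().as_str() {
            "cbz" => Ok(Self::Cbz),
            "epub" => Ok(Self::EPub),
            other => Err(format!("unknown file type: {other}")),
        }
    }
}
```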