From 742a2d1b7f489cff1547b3190951759ce0b7ebd6 Mon Sep 17 00:00:00 2001 From: Rod S Date: Sun, 8 Dec 2024 16:33:05 -0800 Subject: [PATCH] Delete change detector, just build the whole font always. Shuffle things as a result. --- README.md | 11 +- fontc/src/args.rs | 12 +- fontc/src/change_detector.rs | 387 ------------------ fontc/src/config.rs | 131 ------- fontc/src/error.rs | 3 - fontc/src/lib.rs | 736 ++++------------------------------- fontc/src/timing.rs | 1 - fontc/src/work.rs | 9 + fontc/src/workload.rs | 355 ++++++++++------- fontir/Cargo.toml | 3 - fontir/src/error.rs | 23 +- fontir/src/glyph.rs | 3 +- fontir/src/lib.rs | 2 - fontir/src/orchestration.rs | 12 +- fontir/src/paths.rs | 3 - fontir/src/serde.rs | 99 ----- fontir/src/source.rs | 179 ++------- fontir/src/stateset.rs | 458 ---------------------- fontra2fontir/src/source.rs | 204 ++++------ glyphs2fontir/src/source.rs | 377 +++++------------- ufo2fontir/src/source.rs | 489 ++++++++--------------- 21 files changed, 663 insertions(+), 2834 deletions(-) delete mode 100644 fontc/src/change_detector.rs delete mode 100644 fontc/src/config.rs delete mode 100644 fontir/src/serde.rs delete mode 100644 fontir/src/stateset.rs diff --git a/README.md b/README.md index 993f4a095..821afd316 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ Where in we pursue oxidizing [fontmake](https://github.com/googlefonts/fontmake) For context around where fontmake came from see [Mr B goes to Vartown](https://github.com/googlefonts/oxidize/blob/main/text/2023-10-18-mrb-goes-to-vartown.md). -Converts source to IR, and then IR to font binary. Aims to be safe, incremental, and fast. +Converts source to IR, and then IR to font binary. Aims to be safe and fast. References @@ -47,14 +47,13 @@ Install the latest version of Rust, https://www.rust-lang.org/tools/install. 
$ cargo run -p fontc -- resources/testdata/wght_var.designspace ``` -### Emit IR to enable incremental builds +### Emit IR -If you pass the `--incremental` (or `-i`) option, the IR will be written to disk inside -the build working directory, so that the next time you run fontc with the same source file -only what changed will be rebuilt. +If you pass the `--emit-ir` option, the IR will be written to disk inside +the build working directory. This can be helpful when troubleshooting. ```shell -$ cargo run -p fontc -- --incremental resources/testdata/wght_var.designspace +$ cargo run -p fontc -- --emit-ir resources/testdata/wght_var.designspace $ ls build/ ``` diff --git a/fontc/src/args.rs b/fontc/src/args.rs index 49d72b176..be33a7cde 100644 --- a/fontc/src/args.rs +++ b/fontc/src/args.rs @@ -23,9 +23,9 @@ pub struct Args { #[arg(short, long)] source: Option, - /// Whether to write IR to disk. Must be true if you want incremental compilation. + /// Whether to write IR to disk. #[arg(short, long, default_value = "false")] - pub incremental: bool, + pub emit_ir: bool, /// Output file name (default: build/font.ttf) #[arg(short, long)] @@ -109,7 +109,7 @@ impl Args { pub fn flags(&self) -> Flags { let mut flags = Flags::default(); - flags.set(Flags::EMIT_IR, self.incremental); + flags.set(Flags::EMIT_IR, self.emit_ir); flags.set(Flags::EMIT_DEBUG, self.emit_debug); flags.set(Flags::PREFER_SIMPLE_GLYPHS, self.prefer_simple_glyphs); flags.set(Flags::FLATTEN_COMPONENTS, self.flatten_components); @@ -129,7 +129,7 @@ impl Args { glyph_name_filter: None, input_source: Some(input_source), source: None, - incremental: true, + emit_ir: false, output_file: None, emit_debug: false, // they get destroyed by test cleanup emit_timing: false, @@ -152,7 +152,9 @@ impl Args { use crate::testdata_dir; let input_source = testdata_dir().join(source).canonicalize().unwrap(); - Self::new(build_dir, input_source) + let mut result = Self::new(build_dir, input_source); + result.emit_ir = true; 
+ result } /// The input source to compile. diff --git a/fontc/src/change_detector.rs b/fontc/src/change_detector.rs deleted file mode 100644 index 94116b7ca..000000000 --- a/fontc/src/change_detector.rs +++ /dev/null @@ -1,387 +0,0 @@ -//! tracking changes during compilation - -use std::{ffi::OsStr, fmt::Debug, fs, path::Path}; - -use bitflags::bitflags; -use fontbe::{ - orchestration::{AnyWorkId, WorkId as BeWorkIdentifier}, - paths::Paths as BePaths, -}; -use fontra2fontir::source::FontraIrSource; - -use crate::{create_timer, timing::JobTimer, work::AnyWork, workload::Workload, Config, Error}; -use fontdrasil::{coords::NormalizedLocation, types::GlyphName}; -use fontir::{ - orchestration::WorkId as FeWorkIdentifier, - paths::Paths as IrPaths, - source::{Input, Source}, -}; -use glyphs2fontir::source::GlyphsIrSource; -use ufo2fontir::source::DesignSpaceIrSource; - -use indexmap::IndexSet; -use regex::Regex; - -bitflags! { - #[derive(Clone, Copy, Debug)] - pub struct BuildFlags: u32 { - /// We need to generate the direct output artifacts - const BUILD_OUTPUTS = 0b0001; - /// We need to rebuild anything that depends on us - const BUILD_DEPENDENTS = 0b0010; - const BUILD_ALL = Self::BUILD_OUTPUTS.bits() | Self::BUILD_DEPENDENTS.bits(); - } -} - -/// Figures out what changed and helps create work to build updated versions -/// -/// Uses [Source] to abstract over whether source in .glyphs, .designspace, etc. 
-pub struct ChangeDetector { - glyph_name_filter: Option, - ir_paths: IrPaths, - ir_source: Box, - prev_inputs: Input, - current_inputs: Input, - be_paths: BePaths, - emit_ir: bool, - skip_features: bool, - static_metadata_changed: bool, - glyph_order_changed: bool, - glyphs_changed: IndexSet, - glyphs_deleted: IndexSet, -} - -impl Debug for ChangeDetector { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "ChangeDetector") - } -} - -impl ChangeDetector { - pub fn new( - config: Config, - ir_paths: IrPaths, - be_paths: BePaths, - prev_inputs: Input, - timer: &mut JobTimer, - ) -> Result { - let time = create_timer(AnyWorkId::InternalTiming("new change detector"), 0) - .queued() - .run(); - - let mut ir_source = ir_source(config.args.source())?; - let mut current_inputs = ir_source.inputs().map_err(Error::FontIrError)?; - - let glyph_name_filter = config - .args - .glyph_name_filter - .clone() - .map(|reg| reg.into_inner()); - - if let Some(regex) = &glyph_name_filter { - current_inputs.glyphs.retain(|glyph_name, _| { - let result = regex.is_match(glyph_name.as_str()); - if !result { - log::trace!("'{glyph_name}' does not match --glyph_name_filter"); - } - result - }); - } - - let static_metadata_changed = current_inputs.static_metadata != prev_inputs.static_metadata - || !ir_paths - .target_file(&FeWorkIdentifier::StaticMetadata) - .is_file(); - - let glyph_order_changed = static_metadata_changed - || !ir_paths - .target_file(&FeWorkIdentifier::GlyphOrder) - .is_file(); - - let glyphs_changed = if static_metadata_changed || glyph_order_changed { - current_inputs.glyphs.keys().cloned().collect() - } else { - current_inputs - .glyphs - .iter() - .filter_map( - |(glyph_name, curr_state)| match prev_inputs.glyphs.get(glyph_name) { - Some(prev_state) => { - // If the input changed or the output doesn't exist a rebuild is probably in order - (prev_state != curr_state - || !ir_paths - 
.target_file(&FeWorkIdentifier::Glyph(glyph_name.clone())) - .exists()) - .then_some(glyph_name) - } - None => Some(glyph_name), - }, - ) - .cloned() - .collect() - }; - - let glyphs_deleted = prev_inputs - .glyphs - .keys() - .filter(|glyph_name| !current_inputs.glyphs.contains_key(*glyph_name)) - .cloned() - .collect(); - - timer.add(time.complete()); - - Ok(ChangeDetector { - glyph_name_filter, - ir_paths, - ir_source, - prev_inputs, - current_inputs, - be_paths, - emit_ir: config.args.incremental, - skip_features: config.args.skip_features, - static_metadata_changed, - glyph_order_changed, - glyphs_changed, - glyphs_deleted, - }) - } - - pub fn glyph_name_filter(&self) -> Option<&Regex> { - self.glyph_name_filter.as_ref() - } - - pub fn current_inputs(&self) -> &Input { - &self.current_inputs - } - - pub fn ir_source(&self) -> &dyn Source { - self.ir_source.as_ref() - } - - pub fn ir_paths(&self) -> &IrPaths { - &self.ir_paths - } - - pub fn be_paths(&self) -> &BePaths { - &self.be_paths - } - - fn target_exists(&self, work_id: &AnyWorkId) -> bool { - match work_id { - AnyWorkId::Fe(work_id) => self.ir_paths.target_file(work_id).is_file(), - AnyWorkId::Be(work_id) => self.be_paths.target_file(work_id).is_file(), - AnyWorkId::InternalTiming(..) 
=> false, - } - } - - fn input_changed(&self, work_id: &AnyWorkId) -> bool { - match work_id { - AnyWorkId::Fe(FeWorkIdentifier::StaticMetadata) => { - self.current_inputs.static_metadata != self.prev_inputs.static_metadata - } - AnyWorkId::Fe(FeWorkIdentifier::GlobalMetrics) => { - self.current_inputs.global_metrics != self.prev_inputs.global_metrics - } - AnyWorkId::Be(BeWorkIdentifier::GlyfFragment(glyph_name)) => { - self.current_inputs.glyphs.get(glyph_name) - != self.prev_inputs.glyphs.get(glyph_name) - } - AnyWorkId::Be(BeWorkIdentifier::GvarFragment(glyph_name)) => { - self.current_inputs.glyphs.get(glyph_name) - != self.prev_inputs.glyphs.get(glyph_name) - } - _ => panic!("input_changed does not yet support {work_id:?}"), - } - } - - fn output_exists(&self, work: &AnyWork) -> bool { - self.target_exists(&work.id()) - && work - .also_completes() - .iter() - .all(|id| self.target_exists(id)) - } - - pub fn should_skip_features(&self) -> bool { - self.skip_features - } - - /// Not all work ... works ... with this method; notably muts support input_changed. - pub(crate) fn simple_should_run(&self, work: &AnyWork) -> bool { - let work_id = work.id(); - !self.output_exists(work) || self.input_changed(&work_id) - } - - /// Simple work has simple, static, dependencies. Anything that depends on all-of-type - /// (e.g. all glyph ir) is not (yet) amenable to this path. 
- pub fn add_simple_work(&self, workload: &mut Workload, work: AnyWork) { - let output_exists = self.output_exists(&work); - let input_changed = self.input_changed(&work.id()); - let run = input_changed || !output_exists; - workload.add(work, run); - } - - pub fn create_workload(&mut self, timer: JobTimer) -> Result { - let mut workload = Workload::new(self, timer); - - // Create work roughly in the order it would typically occur - // Work is eligible to run as soon as all dependencies are complete - // so this is NOT the definitive execution order - - let source = self.ir_source.as_ref(); - - // Source => IR - self.add_simple_work( - &mut workload, - source - .create_static_metadata_work(&self.current_inputs)? - .into(), - ); - self.add_simple_work( - &mut workload, - source - .create_global_metric_work(&self.current_inputs)? - .into(), - ); - - Ok(workload) - } - - // TODO: could this be solved based on work id and also completes? - - pub fn static_metadata_ir_change(&self) -> bool { - self.static_metadata_changed - } - - pub fn glyph_order_ir_change(&self) -> bool { - self.glyph_order_changed - } - - pub fn global_metrics_ir_change(&self) -> bool { - self.current_inputs.global_metrics != self.prev_inputs.global_metrics - || !self - .ir_paths - .target_file(&FeWorkIdentifier::GlobalMetrics) - .is_file() - } - - pub fn feature_ir_change(&self) -> bool { - self.glyph_order_ir_change() - || self.current_inputs.features != self.prev_inputs.features - || !self - .ir_paths - .target_file(&FeWorkIdentifier::Features) - .is_file() - || !self - .ir_paths - .target_file(&FeWorkIdentifier::KerningGroups) - .is_file() - } - - pub fn feature_be_change(&self) -> bool { - self.feature_ir_change() - || self.kerning_be_change() - || self.mark_be_change() - || !self - .be_paths - .target_file(&BeWorkIdentifier::Features) - .is_file() - } - - pub fn mark_be_change(&self) -> bool { - // Glyphs produce anchors and we need anchors - !self.glyphs_changed.is_empty() - || !self - 
.be_paths - .target_file(&BeWorkIdentifier::Marks) - .is_file() - } - - pub fn kerning_be_change(&self) -> bool { - self.kerning_groups_ir_change() - || !self - .be_paths - .target_file(&BeWorkIdentifier::GatherIrKerning) - .is_file() - } - - pub fn kerning_groups_ir_change(&self) -> bool { - self.static_metadata_ir_change() - || self.glyph_order_ir_change() - || self.current_inputs.features != self.prev_inputs.features - || !self - .ir_paths - .target_file(&FeWorkIdentifier::KerningGroups) - .is_file() - } - - pub fn kerning_at_ir_change(&self, at: NormalizedLocation) -> bool { - self.kerning_groups_ir_change() - || self.current_inputs.features != self.prev_inputs.features - || !self - .ir_paths - .target_file(&FeWorkIdentifier::KernInstance(at)) - .is_file() - } - - pub fn avar_be_change(&self) -> bool { - self.static_metadata_ir_change() - || !self.be_paths.target_file(&BeWorkIdentifier::Avar).is_file() - } - - pub fn stat_be_change(&self) -> bool { - self.static_metadata_ir_change() - || !self.be_paths.target_file(&BeWorkIdentifier::Stat).is_file() - } - - pub fn fvar_be_change(&self) -> bool { - self.static_metadata_ir_change() - || !self.be_paths.target_file(&BeWorkIdentifier::Fvar).is_file() - } - - pub fn post_be_change(&self) -> bool { - self.static_metadata_ir_change() - || self.glyph_order_ir_change() - || !self.be_paths.target_file(&BeWorkIdentifier::Post).is_file() - } - - pub fn glyphs_changed(&self) -> &IndexSet { - &self.glyphs_changed - } - - pub fn glyphs_deleted(&self) -> &IndexSet { - &self.glyphs_deleted - } - - pub fn finish_successfully(self) -> Result<(), Error> { - if self.emit_ir { - let current_sources = - serde_yaml::to_string(&self.current_inputs).map_err(Error::YamlSerError)?; - let out_path = self.ir_paths.ir_input_file(); - fs::write(out_path, current_sources).map_err(|source| Error::FileIo { - path: out_path.to_owned(), - source, - }) - } else { - Ok(()) - } - } -} - -fn ir_source(source: &Path) -> Result, Error> { - if 
!source.exists() { - return Err(Error::FileExpected(source.to_path_buf())); - } - let ext = source - .extension() - .and_then(OsStr::to_str) - .ok_or_else(|| Error::UnrecognizedSource(source.to_path_buf()))?; - match ext { - "designspace" => Ok(Box::new(DesignSpaceIrSource::new(source.to_path_buf())?)), - "ufo" => Ok(Box::new(DesignSpaceIrSource::new(source.to_path_buf())?)), - "glyphs" => Ok(Box::new(GlyphsIrSource::new(source.to_path_buf()))), - "glyphspackage" => Ok(Box::new(GlyphsIrSource::new(source.to_path_buf()))), - "fontra" => Ok(Box::new(FontraIrSource::new(source.to_path_buf())?)), - _ => Err(Error::UnrecognizedSource(source.to_path_buf())), - } -} diff --git a/fontc/src/config.rs b/fontc/src/config.rs deleted file mode 100644 index 351558238..000000000 --- a/fontc/src/config.rs +++ /dev/null @@ -1,131 +0,0 @@ -//! State for a (possibly incremental) compiler job - -use std::{fs, path::PathBuf}; - -use fontir::{error::TrackFileError, paths::Paths as IrPaths, source::Input, stateset::StateSet}; -use serde::{Deserialize, Serialize}; - -use crate::{Args, Error}; - -/// The settings and compiler definition of a single compilation run. 
-/// -/// This tracks the input arguments for a compilation, as well as the compiler -/// version used (so that we do not reuse incremental state between different -/// compiler versions) -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] -pub struct Config { - pub args: Args, - // The compiler previously used so if the compiler changes config invalidates - compiler: StateSet, -} - -impl Config { - /// Create a new config from the provided cli arguments - pub fn new(args: Args) -> Result { - let mut compiler = StateSet::new(); - compiler - .track_file(&std::env::current_exe().expect("could not get current executable path"))?; - Ok(Config { args, compiler }) - } - - /// Returns the path to the config file for this compilation - fn file(&self) -> PathBuf { - self.args.build_dir.join("fontc.yml") - } - - /// Attempt to initialize the compiler for this run. - /// - /// This serializes the configuration if necessary, and returns any existing - /// incremental state for this run if it exists. - pub fn init(&self) -> Result { - let config_file = self.file(); - - let ir_paths = IrPaths::new(&self.args.build_dir); - let ir_input_file = ir_paths.ir_input_file(); - if self.has_changed() { - log::info!("Config changed, generating a new one"); - if ir_input_file.exists() { - fs::remove_file(ir_input_file) - .map_err(|_| Error::FileExpected(ir_input_file.to_owned()))?; - } - if self.args.incremental { - fs::write(&config_file, serde_yaml::to_string(self)?).map_err(|source| { - Error::FileIo { - path: config_file, - source, - } - })? - } - }; - - if !ir_input_file.exists() { - return Ok(Input::new()); - } - - let yml = fs::read_to_string(ir_input_file).map_err(|source| Error::FileIo { - path: ir_input_file.to_owned(), - source, - })?; - serde_yaml::from_str(&yml).map_err(Into::into) - } - - /// Compare this config to the saved config at the provided path. 
- fn has_changed(&self) -> bool { - let config_file = self.file(); - if !config_file.is_file() { - return true; - } - let yml = fs::read_to_string(config_file).expect("Unable to read config"); - match serde_yaml::from_str::(&yml) { - Ok(prior_config) => self != &prior_config, - Err(err) => { - log::warn!("Unable to parse prior config {err:#?}"); - true - } - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use filetime::FileTime; - use tempfile::tempdir; - - #[test] - fn detect_compiler_change() { - let temp_dir = tempdir().unwrap(); - let build_dir = temp_dir.path(); - let args = Args::for_test(build_dir, "wght_var.designspace"); - let mut config = Config::new(args).unwrap(); - - let compiler_location = std::env::current_exe().unwrap(); - let metadata = compiler_location.metadata().unwrap(); - // size +1, I'd give it all up for just a little more - config.compiler.set_file_state( - &compiler_location, - FileTime::from_system_time(metadata.modified().unwrap()), - metadata.len() + 1, - ); - assert!(config.has_changed()) - } - - #[test] - fn init_captures_state() { - let temp_dir = tempdir().unwrap(); - let build_dir = temp_dir.path(); - let args = Args::for_test(build_dir, "wght_var.designspace"); - let config = Config::new(args.clone()).unwrap(); - - config.init().unwrap(); - let paths = IrPaths::new(build_dir); - let ir_input_file = paths.ir_input_file(); - - assert!(config.file().exists(),); - assert!( - !ir_input_file.exists(), - "Should not exist: {ir_input_file:#?}" - ); - assert!(!Config::new(args).unwrap().has_changed()); - } -} diff --git a/fontc/src/error.rs b/fontc/src/error.rs index 6adb6a4e2..369fe5fe6 100644 --- a/fontc/src/error.rs +++ b/fontc/src/error.rs @@ -1,6 +1,5 @@ use std::{io, path::PathBuf}; -use fontir::error::TrackFileError; use thiserror::Error; #[derive(Debug, Error)] @@ -20,8 +19,6 @@ pub enum Error { #[error(transparent)] YamlSerError(#[from] serde_yaml::Error), #[error(transparent)] - TrackFile(#[from] TrackFileError), - 
#[error(transparent)] FontIrError(#[from] fontir::error::Error), #[error(transparent)] Backend(#[from] fontbe::error::Error), diff --git a/fontc/src/lib.rs b/fontc/src/lib.rs index 495e658f6..487269e67 100644 --- a/fontc/src/lib.rs +++ b/fontc/src/lib.rs @@ -1,61 +1,57 @@ //! A font compiler with aspirations of being fast and safe. mod args; -mod change_detector; -mod config; mod error; mod timing; pub mod work; mod workload; pub use args::Args; -pub use change_detector::ChangeDetector; -pub use config::Config; pub use error::Error; +use fontra2fontir::source::FontraIrSource; +use glyphs2fontir::source::GlyphsIrSource; pub use timing::{create_timer, JobTimer}; +use ufo2fontir::source::DesignSpaceIrSource; use workload::Workload; -use fontbe::{ - avar::create_avar_work, - cmap::create_cmap_work, - features::{ - create_gather_ir_kerning_work, create_kerns_work, create_mark_work, FeatureCompilationWork, - FeatureParsingWork, - }, - font::create_font_work, - fvar::create_fvar_work, - glyphs::{create_glyf_loca_work, create_glyf_work}, - gvar::create_gvar_work, - head::create_head_work, - hvar::create_hvar_work, - meta::create_meta_work, - metrics_and_limits::create_metric_and_limit_work, - mvar::create_mvar_work, - name::create_name_work, - orchestration::AnyWorkId, - os2::create_os2_work, - post::create_post_work, - stat::create_stat_work, -}; +use fontbe::orchestration::AnyWorkId; use std::{ + ffi::OsStr, fs::{self, OpenOptions}, io::BufWriter, path::Path, }; -use fontdrasil::{coords::NormalizedLocation, types::GlyphName}; use fontir::{ - glyph::create_glyph_order_work, orchestration::{Context as FeContext, Flags}, - source::DeleteWork, + source::Source, }; use fontbe::orchestration::Context as BeContext; use fontbe::paths::Paths as BePaths; use fontir::paths::Paths as IrPaths; -use log::{debug, warn}; +use log::debug; + +/// Creates the implementation of [`Source`] that should be used for the provided path +fn create_source(source: &Path) -> Result, Error> { + if 
!source.exists() { + return Err(Error::FileExpected(source.to_path_buf())); + } + let ext = source + .extension() + .and_then(OsStr::to_str) + .ok_or_else(|| Error::UnrecognizedSource(source.to_path_buf()))?; + match ext { + "designspace" => Ok(Box::new(DesignSpaceIrSource::new(source)?)), + "ufo" => Ok(Box::new(DesignSpaceIrSource::new(source)?)), + "glyphs" => Ok(Box::new(GlyphsIrSource::new(source)?)), + "glyphspackage" => Ok(Box::new(GlyphsIrSource::new(source)?)), + "fontra" => Ok(Box::new(FontraIrSource::new(source)?)), + _ => Err(Error::UnrecognizedSource(source.to_path_buf())), + } +} /// Run the compiler with the provided arguments pub fn run(args: Args, mut timer: JobTimer) -> Result<(), Error> { @@ -63,30 +59,16 @@ pub fn run(args: Args, mut timer: JobTimer) -> Result<(), Error> { .queued() .run(); let (ir_paths, be_paths) = init_paths(&args)?; - let config = Config::new(args)?; - let prev_inputs = config.init()?; timer.add(time.complete()); - let mut change_detector = ChangeDetector::new( - config.clone(), - ir_paths.clone(), - be_paths.clone(), - prev_inputs, - &mut timer, - )?; - - let workload = create_workload(&mut change_detector, timer)?; - - let fe_root = FeContext::new_root( - config.args.flags(), - ir_paths, - workload.current_inputs().clone(), - ); - let be_root = BeContext::new_root(config.args.flags(), be_paths, &fe_root); + let workload = Workload::new(args.clone(), timer)?; + + let fe_root = FeContext::new_root(args.flags(), ir_paths); + let be_root = BeContext::new_root(args.flags(), be_paths, &fe_root); let mut timing = workload.exec(&fe_root, &be_root)?; - if config.args.flags().contains(Flags::EMIT_TIMING) { - let path = config.args.build_dir.join("threads.svg"); + if args.flags().contains(Flags::EMIT_TIMING) { + let path = args.build_dir.join("threads.svg"); let out_file = OpenOptions::new() .write(true) .create(true) @@ -102,9 +84,8 @@ pub fn run(args: Args, mut timer: JobTimer) -> Result<(), Error> { .map_err(|source| Error::FileIo 
{ path, source })?; } - change_detector.finish_successfully()?; - - write_font_file(&config.args, &be_root) + // At long last! + write_font_file(&args, &be_root) } pub fn require_dir(dir: &Path) -> Result<(), Error> { @@ -141,10 +122,10 @@ pub fn init_paths(args: &Args) -> Result<(IrPaths, BePaths), Error> { // the build dir stores the IR (for incremental builds) and the default output // file ('font.ttf') so we don't need to create one unless we're writing to it - if args.output_file.is_none() || args.incremental { + if args.output_file.is_none() || args.emit_ir { require_dir(&args.build_dir)?; } - if args.incremental { + if args.emit_ir { require_dir(ir_paths.anchor_ir_dir())?; require_dir(ir_paths.glyph_ir_dir())?; require_dir(be_paths.glyph_dir())?; @@ -165,7 +146,7 @@ pub fn init_paths(args: &Args) -> Result<(IrPaths, BePaths), Error> { pub fn write_font_file(args: &Args, be_context: &BeContext) -> Result<(), Error> { // if IR is off the font didn't get written yet (nothing did), otherwise it's done already let font_file = be_context.font_file(); - if !args.incremental { + if !args.emit_ir { fs::write(&font_file, be_context.font.get().get()).map_err(|source| Error::FileIo { path: font_file, source, @@ -176,367 +157,6 @@ pub fn write_font_file(args: &Args, be_context: &BeContext) -> Result<(), Error> Ok(()) } -fn add_glyph_order_ir_job(workload: &mut Workload) -> Result<(), Error> { - let work = create_glyph_order_work().into(); - workload.add(work, workload.change_detector.glyph_order_ir_change()); - - Ok(()) -} - -fn add_feature_ir_job(workload: &mut Workload) -> Result<(), Error> { - let work = workload - .change_detector - .ir_source() - .create_feature_ir_work(workload.change_detector.current_inputs())? 
- .into(); - workload.add( - work, - workload.change_detector.feature_ir_change() - && !workload.change_detector.should_skip_features(), - ); - Ok(()) -} - -fn add_feature_parse_be_job(workload: &mut Workload) -> Result<(), Error> { - let work = FeatureParsingWork::create(); - - // copied from below - if workload.change_detector.feature_be_change() { - if workload.change_detector.glyph_name_filter().is_some() { - warn!("Not processing BE Features because a glyph name filter is active"); - } - if workload.change_detector.should_skip_features() { - debug!("Not processing BE Features because FEA compilation is disabled"); - } - } - - workload.add( - work.into(), - workload.change_detector.feature_be_change() - && workload.change_detector.glyph_name_filter().is_none() - && !workload.change_detector.should_skip_features(), - ); - Ok(()) -} - -fn add_feature_comp_be_job(workload: &mut Workload) -> Result<(), Error> { - let work = FeatureCompilationWork::create(); - // Features are extremely prone to not making sense when glyphs are filtered - if workload.change_detector.feature_be_change() { - if workload.change_detector.glyph_name_filter().is_some() { - warn!("Not processing BE Features because a glyph name filter is active"); - } - if workload.change_detector.should_skip_features() { - debug!("Not processing BE Features because FEA compilation is disabled"); - } - } - workload.add( - work.into(), - workload.change_detector.feature_be_change() - && workload.change_detector.glyph_name_filter().is_none() - && !workload.change_detector.should_skip_features(), - ); - Ok(()) -} - -fn add_marks_be_job(workload: &mut Workload) -> Result<(), Error> { - let work = create_mark_work(); - // Features are extremely prone to not making sense when glyphs are filtered - if workload.change_detector.mark_be_change() { - if workload.change_detector.glyph_name_filter().is_some() { - warn!("Not processing BE marks because a glyph name filter is active"); - } - if 
workload.change_detector.should_skip_features() { - debug!("Not processing BE marks because FEA compilation is disabled"); - } - } - workload.add( - work.into(), - workload.change_detector.mark_be_change() - && workload.change_detector.glyph_name_filter().is_none() - && !workload.change_detector.should_skip_features(), - ); - Ok(()) -} - -fn add_gather_ir_kerning_be_job(workload: &mut Workload) -> Result<(), Error> { - let work = create_gather_ir_kerning_work(); - // Features are extremely prone to not making sense when glyphs are filtered - if workload.change_detector.kerning_be_change() { - if workload.change_detector.glyph_name_filter().is_some() { - warn!("Not processing BE kerning because a glyph name filter is active"); - } - if workload.change_detector.should_skip_features() { - debug!("Not processing BE kerning because FEA compilation is disabled"); - } - } - workload.add( - work.into(), - workload.change_detector.kerning_be_change() - && workload.change_detector.glyph_name_filter().is_none() - && !workload.change_detector.should_skip_features(), - ); - Ok(()) -} - -fn add_kerns_be_job(workload: &mut Workload) -> Result<(), Error> { - let work = create_kerns_work(); - // Features are extremely prone to not making sense when glyphs are filtered - if workload.change_detector.kerning_be_change() { - if workload.change_detector.glyph_name_filter().is_some() { - warn!("Not processing BE kerning because a glyph name filter is active"); - } - if workload.change_detector.should_skip_features() { - debug!("Not processing BE kerning because FEA compilation is disabled"); - } - } - workload.add( - work.into(), - workload.change_detector.kerning_be_change() - && workload.change_detector.glyph_name_filter().is_none() - && !workload.change_detector.should_skip_features(), - ); - Ok(()) -} - -fn add_kerning_group_ir_job(workload: &mut Workload) -> Result<(), Error> { - let work = workload - .change_detector - .ir_source() - 
.create_kerning_group_ir_work(workload.change_detector.current_inputs())?; - workload.add( - work.into(), - workload.change_detector.kerning_groups_ir_change() - && !workload.change_detector.should_skip_features(), - ); - Ok(()) -} - -fn add_kern_instance_ir_job(workload: &mut Workload, at: NormalizedLocation) -> Result<(), Error> { - let work = workload - .change_detector - .ir_source() - .create_kerning_instance_ir_work(workload.current_inputs(), at.clone())? - .into(); - workload.add( - work, - workload.change_detector.kerning_at_ir_change(at) - && !workload.change_detector.should_skip_features(), - ); - Ok(()) -} - -fn add_glyph_ir_jobs(workload: &mut Workload) -> Result<(), Error> { - // Destroy IR for deleted glyphs. No dependencies. - for glyph_name in workload.change_detector.glyphs_deleted().iter() { - let work = DeleteWork::create(glyph_name.clone()); - workload.add(work.into(), true); - } - - // Generate IR for changed glyphs - let glyphs_changed = workload.change_detector.glyphs_changed(); - let glyph_work = workload - .change_detector - .ir_source() - .create_glyph_ir_work(glyphs_changed, workload.change_detector.current_inputs())?; - for work in glyph_work { - workload.add(work.into(), true); - } - - Ok(()) -} - -fn add_glyph_be_jobs(workload: &mut Workload) -> Result<(), Error> { - let glyphs_changed = workload.change_detector.glyphs_changed(); - for glyph_name in glyphs_changed { - add_glyph_be_job(workload, glyph_name.clone()); - } - Ok(()) -} - -fn add_glyf_loca_be_job(workload: &mut Workload) -> Result<(), Error> { - let glyphs_changed = workload.change_detector.glyphs_changed(); - - // If no glyph has changed there isn't a lot of merging to do - let work = create_glyf_loca_work().into(); - workload.add(work, !glyphs_changed.is_empty()); - - Ok(()) -} - -fn add_avar_be_job(workload: &mut Workload) -> Result<(), Error> { - let work = create_avar_work().into(); - workload.add(work, workload.change_detector.avar_be_change()); - Ok(()) -} - -fn 
add_stat_be_job(workload: &mut Workload) -> Result<(), Error> { - let work = create_stat_work().into(); - workload.add(work, workload.change_detector.stat_be_change()); - Ok(()) -} - -fn add_meta_be_job(workload: &mut Workload) -> Result<(), Error> { - let work = create_meta_work().into(); - workload.add(work, workload.change_detector.static_metadata_ir_change()); - Ok(()) -} - -fn add_fvar_be_job(workload: &mut Workload) -> Result<(), Error> { - let work = create_fvar_work().into(); - workload.add(work, workload.change_detector.fvar_be_change()); - Ok(()) -} - -fn add_gvar_be_job(workload: &mut Workload) -> Result<(), Error> { - let glyphs_changed = workload.change_detector.glyphs_changed(); - - // If no glyph has changed there isn't a lot of merging to do - let work = create_gvar_work().into(); - workload.add(work, !glyphs_changed.is_empty()); - - Ok(()) -} - -fn add_cmap_be_job(workload: &mut Workload) -> Result<(), Error> { - let glyphs_changed = workload.change_detector.glyphs_changed(); - - // If no glyph has changed there isn't a lot of merging to do - let work = create_cmap_work().into(); - workload.add(work, !glyphs_changed.is_empty()); - - Ok(()) -} - -fn add_post_be_job(workload: &mut Workload) -> Result<(), Error> { - let work = create_post_work().into(); - workload.add(work, workload.change_detector.post_be_change()); - Ok(()) -} - -fn add_head_be_job(workload: &mut Workload) -> Result<(), Error> { - let work = create_head_work().into(); - workload.add(work, workload.change_detector.glyph_order_ir_change()); - Ok(()) -} - -fn add_name_be_job(workload: &mut Workload) -> Result<(), Error> { - let work = create_name_work().into(); - workload.add(work, workload.change_detector.static_metadata_ir_change()); - Ok(()) -} - -fn add_os2_be_job(workload: &mut Workload) -> Result<(), Error> { - let work = create_os2_work().into(); - workload.add(work, workload.change_detector.static_metadata_ir_change()); - Ok(()) -} - -fn add_metric_and_limits_job(workload: &mut 
Workload) -> Result<(), Error> { - let glyphs_changed = workload.change_detector.glyphs_changed(); - - // If no glyph has changed there isn't a lot to do - let work = create_metric_and_limit_work().into(); - workload.add(work, !glyphs_changed.is_empty()); - Ok(()) -} - -fn add_hvar_be_job(workload: &mut Workload) -> Result<(), Error> { - let glyphs_changed = workload.change_detector.glyphs_changed(); - - let work = create_hvar_work().into(); - workload.add( - work, - // Static metadata contains VariationModel, if axes coordinates change - // we need to recompute variable tables such as HVAR. - // Glyph order matters because HVAR may choose to store variation items - // directly mapping to glyph indices. And glyph IR contains advance width - // among other things. - // TODO: ideally be more granular here e.g. by storing axes and advance widths - // in a separate IR file. - // https://github.com/googlefonts/fontc/issues/526 - workload.change_detector.static_metadata_ir_change() - || workload.change_detector.glyph_order_ir_change() - || !glyphs_changed.is_empty(), - ); - Ok(()) -} - -fn add_mvar_be_job(workload: &mut Workload) -> Result<(), Error> { - let work = create_mvar_work().into(); - workload.add( - work, - workload.change_detector.static_metadata_ir_change() - || workload.change_detector.global_metrics_ir_change(), - ); - Ok(()) -} - -fn add_font_be_job(workload: &mut Workload) -> Result<(), Error> { - let glyphs_changed = workload.change_detector.glyphs_changed(); - - // If glyphs or features changed we better do the thing - let work = create_font_work(); - workload.add( - work.into(), - !glyphs_changed.is_empty() || workload.change_detector.feature_be_change(), - ); - Ok(()) -} - -fn add_glyph_be_job(workload: &mut Workload, glyph_name: GlyphName) { - let work = create_glyf_work(glyph_name).into(); - let should_run = workload.change_detector.simple_should_run(&work); - workload.add(work, should_run); -} - -//FIXME: I should be a method on ChangeDetector -pub 
fn create_workload( - change_detector: &mut ChangeDetector, - timer: JobTimer, -) -> Result { - let time = create_timer(AnyWorkId::InternalTiming("Create workload"), 0) - .queued() - .run(); - let mut workload = change_detector.create_workload(timer)?; - - // FE: f(source) => IR - add_feature_ir_job(&mut workload)?; - add_kerning_group_ir_job(&mut workload)?; - add_glyph_ir_jobs(&mut workload)?; - add_glyph_order_ir_job(&mut workload)?; - - // BE: f(IR, maybe other BE work) => binary - - add_feature_parse_be_job(&mut workload)?; - add_feature_comp_be_job(&mut workload)?; - add_glyph_be_jobs(&mut workload)?; - add_glyf_loca_be_job(&mut workload)?; - add_avar_be_job(&mut workload)?; - add_stat_be_job(&mut workload)?; - add_meta_be_job(&mut workload)?; - add_cmap_be_job(&mut workload)?; - add_fvar_be_job(&mut workload)?; - add_gvar_be_job(&mut workload)?; - add_head_be_job(&mut workload)?; - add_gather_ir_kerning_be_job(&mut workload)?; - add_kerns_be_job(&mut workload)?; - add_marks_be_job(&mut workload)?; - add_metric_and_limits_job(&mut workload)?; - add_hvar_be_job(&mut workload)?; - add_mvar_be_job(&mut workload)?; - add_name_be_job(&mut workload)?; - add_os2_be_job(&mut workload)?; - add_post_be_job(&mut workload)?; - - // Make a damn font - add_font_be_job(&mut workload)?; - - workload.timer.add(time.complete()); - - Ok(workload) -} - #[cfg(test)] pub fn testdata_dir() -> std::path::PathBuf { // cargo test seems to run in the project directory @@ -569,7 +189,7 @@ mod tests { AnyWorkId, Context as BeContext, Glyph, LocaFormatWrapper, WorkId as BeWorkIdentifier, }; use fontdrasil::{ - coords::NormalizedCoord, + coords::{NormalizedCoord, NormalizedLocation}, paths::string_to_filename, types::{GlyphName, WidthClass}, }; @@ -577,7 +197,6 @@ mod tests { ir::{self, GlobalMetric, GlyphOrder, KernGroup, KernPair, KernSide}, orchestration::{Context as FeContext, Persistable, WorkId as FeWorkIdentifier}, }; - use indexmap::IndexSet; use kurbo::{Point, Rect}; use 
log::info; use pretty_assertions::assert_eq; @@ -630,10 +249,7 @@ mod tests { /// the directory is deleted. _temp_dir: TempDir, build_dir: PathBuf, - args: Args, work_executed: HashSet, - glyphs_changed: IndexSet, - glyphs_deleted: IndexSet, fe_context: FeContext, be_context: BeContext, raw_font: Vec, @@ -644,14 +260,8 @@ mod tests { TestCompile::compile(source, |args| args) } - fn compile_again(prior_compile: &TestCompile) -> TestCompile { - TestCompile::compile(prior_compile.args.source().to_str().unwrap(), |_| { - prior_compile.args.clone() - }) - } - fn compile(source: &str, adjust_args: impl Fn(Args) -> Args) -> TestCompile { - let mut timer = JobTimer::new(Instant::now()); + let timer = JobTimer::new(Instant::now()); let _ = env_logger::builder().is_test(true).try_init(); let temp_dir = tempdir().unwrap(); @@ -661,46 +271,26 @@ mod tests { info!("Compile {args:?}"); let (ir_paths, be_paths) = init_paths(&args).unwrap(); - let config = Config::new(args).unwrap(); - let prev_inputs = config.init().unwrap(); - - let mut change_detector = ChangeDetector::new( - config.clone(), - ir_paths.clone(), - be_paths.clone(), - prev_inputs, - &mut timer, - ) - .unwrap(); - let build_dir = change_detector.be_paths().build_dir().to_path_buf(); + let build_dir = be_paths.build_dir().to_path_buf(); - let fe_context = FeContext::new_root( - config.args.flags(), - ir_paths, - change_detector.current_inputs().clone(), - ); - let be_context = - BeContext::new_root(config.args.flags(), be_paths, &fe_context.read_only()); + let fe_context = FeContext::new_root(args.flags(), ir_paths); + let be_context = BeContext::new_root(args.flags(), be_paths, &fe_context.read_only()); let mut result = TestCompile { _temp_dir: temp_dir, build_dir, - args: config.args.clone(), work_executed: HashSet::new(), - glyphs_changed: change_detector.glyphs_changed().clone(), - glyphs_deleted: change_detector.glyphs_deleted().clone(), fe_context, be_context, raw_font: Vec::new(), }; - let mut workload = 
create_workload(&mut change_detector, timer).unwrap(); + let mut workload = Workload::new(args.clone(), timer).unwrap(); let completed = workload.run_for_test(&result.fe_context, &result.be_context); - change_detector.finish_successfully().unwrap(); result.work_executed = completed; - write_font_file(&config.args, &result.be_context).unwrap(); + write_font_file(&args, &result.be_context).unwrap(); result.raw_font = fs::read(result.build_dir.join("font.ttf")).unwrap(); @@ -743,8 +333,8 @@ mod tests { &mut File::open(build_dir.join("loca.format")).unwrap(), ) .into(), - raw_glyf: read_file(&build_dir.join("glyf.table")), - raw_loca: read_file(&build_dir.join("loca.table")), + raw_glyf: read_file(build_dir, Path::new("glyf.table")), + raw_loca: read_file(build_dir, Path::new("loca.table")), } } @@ -762,39 +352,6 @@ mod tests { } } - /// Copy testdata => tempdir so the test can modify it - fn copy_testdata(from: impl IntoIterator>, to_dir: &Path) { - let from_dir = testdata_dir(); - - let mut from: VecDeque = from.into_iter().map(|p| from_dir.join(p)).collect(); - while let Some(source) = from.pop_back() { - let rel_source = source.strip_prefix(&from_dir).unwrap(); - let dest = to_dir.join(rel_source); - assert!( - source.exists(), - "cannot copy '{source:?}'; it doesn't exist" - ); - - let dest_dir = if source.is_dir() { - dest.as_path() - } else { - dest.parent().unwrap() - }; - if !dest_dir.exists() { - fs::create_dir_all(dest_dir).unwrap(); - } - - if source.is_file() { - fs::copy(&source, &dest).unwrap(); - } - if source.is_dir() { - for entry in fs::read_dir(source).unwrap() { - from.push_back(entry.unwrap().path()); - } - } - } - } - fn assert_compile_work(source: &str, glyphs: Vec<&str>) { let result = TestCompile::compile_source(source); let mut completed = result.work_executed.iter().cloned().collect::>(); @@ -890,114 +447,6 @@ mod tests { ) } - #[test] - fn second_compile_is_nop() { - let result = TestCompile::compile_source("wght_var.designspace"); - 
assert_eq!( - IndexSet::from(["bar".into(), "plus".into()]), - result.glyphs_changed - ); - assert!(result.glyphs_deleted.is_empty()); - - let result = TestCompile::compile_again(&result); - assert!(result.work_executed.is_empty()); - assert!(result.glyphs_changed.is_empty()); - assert!(result.glyphs_deleted.is_empty()); - } - - #[test] - fn second_compile_only_glyph_ir() { - // glyph depends on static metadata, which isn't going to run - let result = TestCompile::compile_source("wght_var.designspace"); - assert!(result.work_executed.len() > 1); - - let glyph_ir_file = result.build_dir.join("glyph_ir/bar.yml"); - fs::remove_file(&glyph_ir_file).unwrap(); - - let result = TestCompile::compile_again(&result); - assert!(glyph_ir_file.exists(), "{glyph_ir_file:?}"); - let mut completed = result.work_executed.iter().cloned().collect::>(); - completed.sort(); - assert_eq!( - vec![ - AnyWorkId::Fe(FeWorkIdentifier::Glyph("bar".into())), - FeWorkIdentifier::Anchor("bar".into()).into(), - BeWorkIdentifier::Features.into(), - BeWorkIdentifier::FeaturesAst.into(), - BeWorkIdentifier::Cmap.into(), - BeWorkIdentifier::Font.into(), - BeWorkIdentifier::Glyf.into(), - BeWorkIdentifier::Gpos.into(), - BeWorkIdentifier::Gsub.into(), - BeWorkIdentifier::Gdef.into(), - BeWorkIdentifier::Gvar.into(), - BeWorkIdentifier::Hhea.into(), - BeWorkIdentifier::Hmtx.into(), - BeWorkIdentifier::Hvar.into(), - BeWorkIdentifier::Loca.into(), - BeWorkIdentifier::LocaFormat.into(), - BeWorkIdentifier::Marks.into(), - BeWorkIdentifier::Maxp.into(), - BeWorkIdentifier::ExtraFeaTables.into(), - ], - completed, - "{completed:#?}" - ); - } - - #[test] - fn second_compile_only_kerning() { - let result = TestCompile::compile_source("glyphs3/WghtVar.glyphs"); - assert!(result.work_executed.len() > 1); - - fs::remove_file(result.build_dir.join("kern_groups.yml")).unwrap(); - fs::remove_file(result.build_dir.join("kern_wght_0.00.yml")).unwrap(); - 
fs::remove_file(result.build_dir.join("kern_wght_1.00.yml")).unwrap(); - - let result = TestCompile::compile_again(&result); - let mut completed = result.work_executed.iter().cloned().collect::>(); - completed.sort(); - assert_eq!( - vec![ - AnyWorkId::Fe(FeWorkIdentifier::Features), - FeWorkIdentifier::KerningGroups.into(), - FeWorkIdentifier::KernInstance(NormalizedLocation::for_pos(&[("wght", 0.0)])) - .into(), - FeWorkIdentifier::KernInstance(NormalizedLocation::for_pos(&[("wght", 1.0)])) - .into(), - BeWorkIdentifier::Features.into(), - BeWorkIdentifier::FeaturesAst.into(), - BeWorkIdentifier::Font.into(), - BeWorkIdentifier::Gpos.into(), - BeWorkIdentifier::Gsub.into(), - BeWorkIdentifier::Gdef.into(), - BeWorkIdentifier::GatherIrKerning.into(), - BeWorkIdentifier::KernFragment(0).into(), - BeWorkIdentifier::GatherBeKerning.into(), - BeWorkIdentifier::ExtraFeaTables.into(), - ], - completed - ); - } - - #[test] - fn deleted_ir_recreated() { - let result = TestCompile::compile_source("wght_var.designspace"); - assert_eq!( - IndexSet::from(["bar".into(), "plus".into()]), - result.glyphs_changed - ); - assert!(result.glyphs_deleted.is_empty()); - - let bar_ir = result.build_dir.join("glyph_ir/bar.yml"); - assert!(bar_ir.is_file(), "no file {bar_ir:#?}"); - fs::remove_file(bar_ir).unwrap(); - - let result = TestCompile::compile_again(&result); - assert_eq!(IndexSet::from(["bar".into()]), result.glyphs_changed); - assert!(result.glyphs_deleted.is_empty()); - } - fn assert_compiles_with_gpos_and_gsub( source: &str, adjust_args: impl Fn(Args) -> Args, @@ -1029,72 +478,11 @@ mod tests { fn compile_fea_with_includes_no_ir() { assert_compiles_with_gpos_and_gsub("fea_include.designspace", |mut args| { args.emit_debug = false; - args.incremental = false; + args.emit_ir = false; args }); } - #[test] - fn compile_fea_again_with_modified_include() { - let temp_dir = tempdir().unwrap(); - let source_dir = temp_dir.path().join("sources"); - let build_dir = 
temp_dir.path().join("build"); - fs::create_dir(&source_dir).unwrap(); - fs::create_dir(build_dir).unwrap(); - - copy_testdata( - [ - "fea_include.designspace", - "fea_include_ufo/common.fea", - "fea_include_ufo/FeaInc-Regular.ufo", - "fea_include_ufo/FeaInc-Bold.ufo", - ], - &source_dir, - ); - - let source = source_dir - .join("fea_include.designspace") - .canonicalize() - .unwrap(); - let result = TestCompile::compile_source(source.to_str().unwrap()); - - let shared_fea = source_dir.join("fea_include_ufo/common.fea"); - fs::write( - &shared_fea, - fs::read_to_string(&shared_fea).unwrap() + "\n\nfeature k2 { pos bar bar -100; } k2;", - ) - .unwrap(); - - let result = TestCompile::compile_again(&result); - - // Features and things downstream of features should rebuild - let mut completed = result.work_executed.iter().cloned().collect::>(); - completed.sort(); - assert_eq!( - vec![ - AnyWorkId::Fe(FeWorkIdentifier::Features), - AnyWorkId::Fe(FeWorkIdentifier::KerningGroups), - AnyWorkId::Fe(FeWorkIdentifier::KernInstance(NormalizedLocation::for_pos( - &[("wght", 0.0)] - ))), - AnyWorkId::Fe(FeWorkIdentifier::KernInstance(NormalizedLocation::for_pos( - &[("wght", 1.0)] - ))), - BeWorkIdentifier::Features.into(), - BeWorkIdentifier::FeaturesAst.into(), - BeWorkIdentifier::Font.into(), - BeWorkIdentifier::Gpos.into(), - BeWorkIdentifier::Gsub.into(), - BeWorkIdentifier::Gdef.into(), - BeWorkIdentifier::GatherIrKerning.into(), - BeWorkIdentifier::KernFragment(0).into(), - BeWorkIdentifier::GatherBeKerning.into(), - BeWorkIdentifier::ExtraFeaTables.into(), - ], - completed - ); - } - fn build_contour_and_composite_glyph(prefer_simple_glyphs: bool) -> (TestCompile, ir::Glyph) { let result = TestCompile::compile("glyphs2/MixedContourComponent.glyphs", |mut args| { args.prefer_simple_glyphs = prefer_simple_glyphs; // <-- important :) @@ -1110,7 +498,23 @@ mod tests { (result, (*glyph).clone()) } - fn read_file(path: &Path) -> Vec { + fn read_file(build_dir: &Path, path: 
&Path) -> Vec { + assert!(build_dir.is_dir(), "{build_dir:?} isn't a directory?!"); + let path = build_dir.join(path); + if !path.exists() { + // When a path is missing it's very helpful to know what's present + use std::io::Write; + let mut stderr = std::io::stderr().lock(); + writeln!(stderr, "Build dir tree").unwrap(); + let mut pending = VecDeque::new(); + pending.push_back(build_dir); + while let Some(pending_dir) = pending.pop_front() { + for entry in fs::read_dir(pending_dir).unwrap() { + let entry = entry.unwrap(); + writeln!(stderr, "{}", entry.path().to_str().unwrap()).unwrap(); + } + } + } assert!(path.exists(), "{path:?} not found"); let mut buf = Vec::new(); File::open(path).unwrap().read_to_end(&mut buf).unwrap(); @@ -1119,18 +523,16 @@ mod tests { fn read_ir_glyph(build_dir: &Path, name: &str) -> ir::Glyph { let raw_glyph = read_file( - &build_dir - .join("glyph_ir") - .join(string_to_filename(name, ".yml")), + build_dir, + &Path::new("glyph_ir").join(string_to_filename(name, ".yml")), ); ir::Glyph::read(&mut raw_glyph.as_slice()) } fn read_be_glyph(build_dir: &Path, name: &str) -> RawGlyph { let raw_glyph = read_file( - &build_dir - .join("glyphs") - .join(string_to_filename(name, ".glyf")), + build_dir, + &Path::new("glyphs").join(string_to_filename(name, ".glyf")), ); let read: &mut dyn Read = &mut raw_glyph.as_slice(); Glyph::read(read).data @@ -1710,7 +1112,7 @@ mod tests { #[test] fn compile_without_ir() { let result = TestCompile::compile("glyphs2/WghtVar.glyphs", |mut args| { - args.incremental = false; + args.emit_ir = false; args }); diff --git a/fontc/src/timing.rs b/fontc/src/timing.rs index 4af2c9338..ceda2af5b 100644 --- a/fontc/src/timing.rs +++ b/fontc/src/timing.rs @@ -126,7 +126,6 @@ fn short_name(id: &AnyWorkId) -> &'static str { AnyWorkId::Fe(FeWorkIdentifier::Features) => "fea", AnyWorkId::Fe(FeWorkIdentifier::GlobalMetrics) => "metrics", AnyWorkId::Fe(FeWorkIdentifier::Glyph(..)) => "glyph", - 
AnyWorkId::Fe(FeWorkIdentifier::GlyphIrDelete(..)) => "rm ir", AnyWorkId::Fe(FeWorkIdentifier::GlyphOrder) => "glyphorder", AnyWorkId::Fe(FeWorkIdentifier::KerningGroups) => "kerngrps", AnyWorkId::Fe(FeWorkIdentifier::KernInstance(..)) => "kernat", diff --git a/fontc/src/work.rs b/fontc/src/work.rs index fbfc8bc50..a3cea7e17 100644 --- a/fontc/src/work.rs +++ b/fontc/src/work.rs @@ -13,6 +13,10 @@ use crate::Error; pub enum AnyWork { Fe(Box), Be(Box), + /// Used to get things marked completed w/o actually running. Skipping basically. + Nop(AnyWorkId, AnyAccess), + /// Used for work that isn't actually run, just gets marked complete on completion of some other work + AlsoComplete(AnyWorkId, AnyAccess), } impl From> for AnyWork { @@ -32,6 +36,7 @@ impl AnyWork { match self { AnyWork::Be(work) => work.id(), AnyWork::Fe(work) => work.id().into(), + AnyWork::Nop(id, ..) | AnyWork::AlsoComplete(id, ..) => id.clone(), } } @@ -39,6 +44,7 @@ impl AnyWork { match self { AnyWork::Be(work) => work.read_access().into(), AnyWork::Fe(work) => work.read_access().into(), + AnyWork::Nop(.., access) | AnyWork::AlsoComplete(.., access) => access.clone(), } } @@ -46,6 +52,7 @@ impl AnyWork { match self { AnyWork::Be(work) => work.write_access().into(), AnyWork::Fe(work) => work.write_access().into(), + AnyWork::Nop(..) | AnyWork::AlsoComplete(..) => AnyAccess::Fe(Access::None), } } @@ -57,6 +64,7 @@ impl AnyWork { .into_iter() .map(|id| id.into()) .collect(), + AnyWork::Nop(..) | AnyWork::AlsoComplete(..) => Vec::new(), } } @@ -64,6 +72,7 @@ impl AnyWork { match self { AnyWork::Be(work) => work.exec(context.unwrap_be()).map_err(|e| e.into()), AnyWork::Fe(work) => work.exec(context.unwrap_fe()).map_err(|e| e.into()), + AnyWork::Nop(..) | AnyWork::AlsoComplete(..) 
=> Ok(()), } } } diff --git a/fontc/src/workload.rs b/fontc/src/workload.rs index 3a89d029e..63b03f737 100644 --- a/fontc/src/workload.rs +++ b/fontc/src/workload.rs @@ -11,8 +11,26 @@ use std::{ use crossbeam_channel::{Receiver, TryRecvError}; use fontbe::{ - features::create_kern_segment_work, + avar::create_avar_work, + cmap::create_cmap_work, + features::{ + create_gather_ir_kerning_work, create_kern_segment_work, create_kerns_work, + create_mark_work, FeatureCompilationWork, FeatureParsingWork, + }, + font::create_font_work, + fvar::create_fvar_work, + glyphs::{create_glyf_loca_work, create_glyf_work}, + gvar::create_gvar_work, + head::create_head_work, + hvar::create_hvar_work, + meta::create_meta_work, + metrics_and_limits::create_metric_and_limit_work, + mvar::create_mvar_work, + name::create_name_work, orchestration::{AnyWorkId, Context as BeContext, WorkId as BeWorkIdentifier}, + os2::create_os2_work, + post::create_post_work, + stat::create_stat_work, }; use fontdrasil::{ coords::NormalizedLocation, @@ -20,21 +38,23 @@ use fontdrasil::{ types::GlyphName, }; use fontir::{ + glyph::create_glyph_order_work, orchestration::{Context as FeContext, WorkId as FeWorkIdentifier}, - source::Input, + source::Source, }; use log::{debug, trace, warn}; use crate::{ + create_source, timing::{create_timer, JobTime, JobTimeQueued, JobTimer}, work::{AnyAccess, AnyContext, AnyWork}, - ChangeDetector, Error, + Args, Error, }; /// A set of interdependent jobs to execute. -#[derive(Debug)] -pub struct Workload<'a> { - pub(crate) change_detector: &'a ChangeDetector, +pub struct Workload { + args: Args, + source: Box, job_count: usize, success: HashSet, error: Option, @@ -56,13 +76,11 @@ pub struct Workload<'a> { pub(crate) struct Job { pub(crate) id: AnyWorkId, // The actual task. Exec takes work and sets the running flag. - pub(crate) work: Option, + pub(crate) work: AnyWork, // Things our job needs read access to. Job won't run if anything it can read is pending. 
pub(crate) read_access: AnyAccess, // Things our job needs write access to pub(crate) write_access: AnyAccess, - // Does this job actually need to execute? - pub(crate) run: bool, // is this job running right now? pub(crate) running: bool, } @@ -93,10 +111,23 @@ fn priority(id: &AnyWorkId) -> u32 { } } -impl<'a> Workload<'a> { - pub fn new(change_detector: &'a ChangeDetector, timer: JobTimer) -> Workload<'a> { - Workload { - change_detector, +impl Workload { + // Pass in timer to enable t0 to be as early as possible + pub fn new(args: Args, mut timer: JobTimer) -> Result { + let time = create_timer(AnyWorkId::InternalTiming("create_source"), 0) + .queued() + .run(); + + let source = create_source(args.source())?; + + timer.add(time.complete()); + let time = create_timer(AnyWorkId::InternalTiming("Create workload"), 0) + .queued() + .run(); + + let mut workload = Self { + args, + source, job_count: 0, success: Default::default(), error: Default::default(), @@ -105,6 +136,69 @@ impl<'a> Workload<'a> { jobs_pending: Default::default(), count_pending: Default::default(), timer, + }; + + // Create work roughly in the order it would typically occur + // Work is eligible to run as soon as all dependencies are complete + // so this is NOT the definitive execution order + + // FE: f(source) => IR + workload.add(workload.source.create_static_metadata_work()?); + workload.add(workload.source.create_global_metric_work()?); + workload.add(workload.source.create_feature_ir_work()?); + workload.add_skippable_feature_work(workload.source.create_kerning_group_ir_work()?); + workload + .source + .create_glyph_ir_work()? 
+ .into_iter() + .for_each(|w| workload.add(w)); + workload.add(create_glyph_order_work()); + + // BE: f(IR, maybe other BE work) => binary + workload.add_skippable_feature_work(FeatureParsingWork::create()); + workload.add_skippable_feature_work(FeatureCompilationWork::create()); + let ir_glyphs = workload + .jobs_pending + .keys() + .filter_map(|id| match id { + AnyWorkId::Fe(FeWorkIdentifier::Glyph(name)) => Some(name.clone()), + _ => None, + }) + .collect::>(); + for glyph_name in ir_glyphs { + workload.add(create_glyf_work(glyph_name)) + } + workload.add(create_glyf_loca_work()); + workload.add(create_avar_work()); + workload.add(create_stat_work()); + workload.add(create_meta_work()); + workload.add(create_cmap_work()); + workload.add(create_fvar_work()); + workload.add(create_gvar_work()); + workload.add(create_head_work()); + workload.add_skippable_feature_work(create_gather_ir_kerning_work()); + workload.add_skippable_feature_work(create_kerns_work()); + workload.add_skippable_feature_work(create_mark_work()); + workload.add(create_metric_and_limit_work()); + workload.add(create_hvar_work()); + workload.add(create_mvar_work()); + workload.add(create_name_work()); + workload.add(create_os2_work()); + workload.add(create_post_work()); + + // Make a damn font + workload.add(create_font_work()); + + workload.timer.add(time.complete()); + + Ok(workload) + } + + fn add_skippable_feature_work(&mut self, work: impl Into) { + if !self.args.skip_features { + self.add(work); + } else { + self.skip(work); } } @@ -120,81 +214,81 @@ impl<'a> Workload<'a> { result } - pub fn current_inputs(&self) -> &Input { - self.change_detector.current_inputs() - } - - // TODO flags would be clearer than multiple bools - pub(crate) fn add(&mut self, work: AnyWork, should_run: bool) { + pub(crate) fn add(&mut self, work: impl Into) { + let work = work.into(); let id = work.id(); let read_access = work.read_access(); let write_access = work.write_access(); - self.insert( - id.clone(), - 
Job { - id, - work: Some(work), - read_access, - write_access, - run: should_run, - running: false, - }, - ); + self.insert(Job { + id, + work, + read_access, + write_access, + running: false, + }); } - pub(crate) fn insert(&mut self, id: AnyWorkId, job: Job) { - let run = job.run; - let also_completes = job - .work - .as_ref() - .unwrap_or_else(|| panic!("{id:?} submitted without work")) - .also_completes(); + /// Do all the task dependency bookkeeping but don't actually run the wokr + pub(crate) fn skip(&mut self, work: impl Into) { + let work: AnyWork = work.into(); + let id = work.id(); + self.insert_nop(id, work.read_access()); + } - // We need pending entries for also-completes items so dependencies on them work - for id in also_completes.iter() { - self.jobs_pending.insert( - id.clone(), - Job { - id: id.clone(), - work: None, // we exist solely to be marked complete by also_complete - read_access: job.read_access.clone(), // We don't want to be deemed runnable prematurely - write_access: AnyAccess::Be(Access::None), - run: true, - running: false, - }, - ); - self.count_pending - .entry(id.discriminant()) - .or_default() - .fetch_add(1, Ordering::AcqRel); - } + fn insert_with_bookkeeping(&mut self, job: Job) { + trace!( + "insert_job {}{:?} dependencies {:?}", + matches!(job.work, AnyWork::AlsoComplete(..)) + .then_some("(nop) ") + .unwrap_or(""), + job.id, + job.read_access + ); - self.job_count += 1 + also_completes.len(); - self.jobs_pending.insert(id.clone(), job); + self.job_count += 1; self.count_pending - .entry(id.discriminant()) + .entry(job.id.discriminant()) .or_default() .fetch_add(1, Ordering::AcqRel); + self.jobs_pending.insert(job.id.clone(), job); + } + + fn insert_nop(&mut self, id: AnyWorkId, read_access: AnyAccess) { + self.insert_with_bookkeeping(Job { + id: id.clone(), + work: AnyWork::Nop(id.clone(), read_access.clone()), + read_access, // We don't want to be deemed runnable prematurely + write_access: AnyAccess::Fe(Access::None), + 
running: false, + }); + } + pub(crate) fn insert(&mut self, job: Job) { + let also_completes = job.work.also_completes(); + + // We need pending entries for also-completes items so dependencies on them work + for id in also_completes.iter() { + self.insert_with_bookkeeping(Job { + id: id.clone(), + work: AnyWork::AlsoComplete(id.clone(), job.read_access.clone()), + read_access: job.read_access.clone(), // We don't want to be deemed runnable prematurely + write_access: AnyAccess::Fe(Access::None), + running: false, + }); + } if !also_completes.is_empty() { - self.also_completes.insert(id.clone(), also_completes); + self.also_completes.insert(job.id.clone(), also_completes); } - if !run { - for counter in self.counters(&id) { - counter.fetch_sub(1, Ordering::AcqRel); - } - self.mark_also_completed(&id); - self.complete_one(id, true); - } + self.insert_with_bookkeeping(job); } - fn complete_one(&mut self, id: AnyWorkId, expected_pending: bool) { - let was_pending = self.jobs_pending.remove(&id).is_some(); - if expected_pending && !was_pending { - panic!("Unable to complete {id:?}"); + fn complete_one(&mut self, id: AnyWorkId) { + trace!("complete_one {id:?}"); + if self.jobs_pending.remove(&id).is_none() { + panic!("{id:?} completed but isn't pending!"); } - if expected_pending && !self.success.insert(id.clone()) { + if !self.success.insert(id.clone()) { panic!("Multiple completions of {id:?}"); } } @@ -204,7 +298,7 @@ impl<'a> Workload<'a> { return; }; for id in also_completed { - self.complete_one(id, true); + self.complete_one(id); } } @@ -233,7 +327,7 @@ impl<'a> Workload<'a> { for counter in self.counters(&be_id) { counter.fetch_sub(1, Ordering::AcqRel); } - self.complete_one(be_id.clone(), true); + self.complete_one(be_id.clone()); self.mark_also_completed(&be_id); return; } @@ -272,7 +366,7 @@ impl<'a> Workload<'a> { self.timer.add(timing); - self.complete_one(success.clone(), false); + self.complete_one(success.clone()); self.mark_also_completed(&success); // 
When glyph order finalizes, add BE work for any new glyphs @@ -284,7 +378,7 @@ impl<'a> Workload<'a> { .difference(&preliminary_glyph_order) { debug!("Generating a BE job for {glyph_name}"); - super::add_glyph_be_job(self, glyph_name.clone()); + self.add(create_glyf_work(glyph_name.clone())); // Glyph order is done so all IR must be done. Copy dependencies from the IR for the same name. self.update_be_glyph_work(fe_root, glyph_name.clone()); @@ -292,9 +386,13 @@ impl<'a> Workload<'a> { } if let AnyWorkId::Fe(FeWorkIdentifier::KerningGroups) = success { - let groups = fe_root.kerning_groups.get(); - for location in groups.locations.iter() { - super::add_kern_instance_ir_job(self, location.clone())?; + if let Some(groups) = fe_root.kerning_groups.try_get() { + for location in groups.locations.iter() { + self.add( + self.source + .create_kerning_instance_ir_work(location.clone())?, + ); + } } // https://github.com/googlefonts/fontc/pull/655: don't set read access on GatherIrKerning until we spawn kern instance tasks @@ -310,11 +408,12 @@ impl<'a> Workload<'a> { } if let AnyWorkId::Be(BeWorkIdentifier::GatherIrKerning) = success { - let kern_pairs = be_root.all_kerning_pairs.get(); - for work in create_kern_segment_work(&kern_pairs) { - self.add(work.into(), true); + // When we skip features there are no kern pairs at all + if let Some(kern_pairs) = be_root.all_kerning_pairs.try_get() { + for work in create_kern_segment_work(&kern_pairs) { + self.add(work); + } } - // https://github.com/googlefonts/fontc/issues/647: it is now safe to set read access on segment gathering self.jobs_pending .get_mut(&AnyWorkId::Be(BeWorkIdentifier::GatherBeKerning)) @@ -406,7 +505,8 @@ impl<'a> Workload<'a> { launchable.clear(); for id in self.jobs_pending.iter().filter_map(|(id, job)| { - (job.work.is_some() && !job.running && self.can_run(job)).then_some(id) + (!matches!(job.work, AnyWork::AlsoComplete(..)) && !job.running && self.can_run(job)) + .then_some(id) }) { 
launchable.push(id.clone()); } @@ -488,8 +588,6 @@ impl<'a> Workload<'a> { .queued() .run(); - // count of launchable jobs that are runnable, i.e. excluding !run jobs - let mut runnable_count = launchable.len(); { let mut run_queue = run_queue.lock().unwrap(); @@ -499,23 +597,9 @@ impl<'a> Workload<'a> { let job = self.jobs_pending.get_mut(id).unwrap(); log::trace!("Start {:?}", id); job.running = true; - if !job.run { - if let Err(e) = - send.send((id.clone(), Ok(()), JobTime::nop(id.clone()))) - { - log::error!( - "Unable to write nop {id:?} to completion channel: {e}" - ); - //FIXME: if we can't send messages it means the receiver has dropped, - //which means we should... return? abort? - } - runnable_count -= 1; - continue; - } - let work = job - .work - .take() - .expect("{id:?} ready to run but has no work?!"); + + let mut work = AnyWork::AlsoComplete(id.clone(), job.read_access.clone()); + std::mem::swap(&mut job.work, &mut work); let work_context = AnyContext::for_work( fe_root, be_root, @@ -539,7 +623,7 @@ impl<'a> Workload<'a> { let timing = create_timer(AnyWorkId::InternalTiming("spawn"), nth_wave) .queued() .run(); - for _ in 0..runnable_count { + for _ in 0..launchable.len() { let send = send.clone(); let run_queue = run_queue.clone(); let abort = abort_queued_jobs.clone(); @@ -671,11 +755,12 @@ impl<'a> Workload<'a> { while let Some((completed_id, result, timing)) = opt_complete.take() { if !match result { Ok(..) 
=> { - let inserted = self.success.insert(completed_id.clone()); - if inserted { + if !self.success.contains(&completed_id) { successes.push((completed_id.clone(), timing)); + true + } else { + false } - inserted } Err(e) => { self.n_failures += 1; @@ -691,12 +776,9 @@ impl<'a> Workload<'a> { panic!("Repeat signals for completion of {completed_id:#?}"); } - // When a job marks another as also completed the original may never have been in pending - self.jobs_pending.remove(&completed_id); - log::debug!( "{}/{} complete, most recently {:?}", - self.n_failures + self.success.len(), + self.n_failures + self.success.len() + successes.len(), self.job_count, completed_id ); @@ -777,40 +859,29 @@ impl<'a> Workload<'a> { let id = &launchable[0]; let timing = create_timer(id.clone(), 0); - let job = self.jobs_pending.remove(id).unwrap(); - if job.run { - let timing = timing.queued(); - let context = - AnyContext::for_work(fe_root, be_root, id, job.read_access, job.write_access); - log::debug!("Exec {:?}", id); - let timing = timing.run(); - job.work - .unwrap_or_else(|| panic!("{id:?} should have work!")) - .exec(context) - .unwrap_or_else(|e| panic!("{id:?} failed: {e:?}")); - - for counter in self.counters(id) { - counter.fetch_sub(1, Ordering::AcqRel); - } + let job = self.jobs_pending.get(id).unwrap(); - let timing = timing.complete(); - assert!( - self.success.insert(id.clone()), - "We just did {id:?} a second time?" 
- ); - self.handle_success(fe_root, be_root, id.clone(), timing) - .unwrap_or_else(|e| panic!("Failed to handle success for {id:?}: {e}")); - } else { - for counter in self.counters(id) { - counter.fetch_sub(1, Ordering::AcqRel); - } - if let Some(also_completes) = self.also_completes.get(id).cloned() { - self.complete_one(id.clone(), false); - for also in also_completes { - self.complete_one(also, true); - } - } + let timing = timing.queued(); + let context = AnyContext::for_work( + fe_root, + be_root, + id, + job.read_access.clone(), + job.write_access.clone(), + ); + log::debug!("Exec {:?}", id); + let timing = timing.run(); + job.work + .exec(context) + .unwrap_or_else(|e| panic!("{id:?} failed: {e:?}")); + + for counter in self.counters(id) { + counter.fetch_sub(1, Ordering::AcqRel); } + + let timing = timing.complete(); + self.handle_success(fe_root, be_root, id.clone(), timing) + .unwrap_or_else(|e| panic!("Failed to handle success for {id:?}: {e}")); } self.success.difference(&pre_success).cloned().collect() } diff --git a/fontir/Cargo.toml b/fontir/Cargo.toml index 1d4224da1..5b7f086b7 100644 --- a/fontir/Cargo.toml +++ b/fontir/Cargo.toml @@ -33,9 +33,6 @@ write-fonts.workspace = true # for pens chrono.workspace = true smol_str.workspace = true -# unique to me! -blake3 = "1.3.3" - [dev-dependencies] diff.workspace = true ansi_term.workspace = true diff --git a/fontir/src/error.rs b/fontir/src/error.rs index d0a31f43d..edc3a2220 100644 --- a/fontir/src/error.rs +++ b/fontir/src/error.rs @@ -14,9 +14,9 @@ pub enum Error { /// A source file was not understood #[error(transparent)] BadSource(#[from] BadSource), - /// An error occured while attempting to track a file - #[error(transparent)] - TrackFile(#[from] TrackFileError), + /// What path?! 
+ #[error("{0} does not exist")] + NoSuchPath(PathBuf), /// An error occured while converting a glyph to IR #[error(transparent)] BadGlyph(#[from] BadGlyph), @@ -99,14 +99,6 @@ pub enum BadSourceKind { Custom(String), } -/// An error that occurs when trying to access a file during change tracking -#[derive(Debug, Error)] -#[error("Error tracking '{path}': '{source}'")] -pub struct TrackFileError { - path: PathBuf, - source: io::Error, -} - /// An error that occurs while trying to convert a glyph to IR. /// /// This bundles up various failure cases along with the name of the glyph in @@ -211,15 +203,6 @@ impl BadSource { } } -impl TrackFileError { - pub(crate) fn new(path: impl Into, source: io::Error) -> Self { - TrackFileError { - path: path.into(), - source, - } - } -} - impl BadGlyph { /// Convenience method for creating a `BadGlyph` error. pub fn new(name: impl Into, kind: impl Into) -> Self { diff --git a/fontir/src/glyph.rs b/fontir/src/glyph.rs index c084f69e8..13d580532 100644 --- a/fontir/src/glyph.rs +++ b/fontir/src/glyph.rs @@ -563,7 +563,6 @@ mod tests { ir::{Component, Glyph, GlyphBuilder, GlyphInstance, GlyphOrder}, orchestration::{Context, Flags, WorkId}, paths::Paths, - source::Input, }; use super::*; @@ -605,7 +604,7 @@ mod tests { fn test_context() -> Context { let mut flags = Flags::default(); flags.set(Flags::EMIT_IR, false); // we don't want to write anything down - Context::new_root(flags, Paths::new(Path::new("/fake/path")), Input::new()) + Context::new_root(flags, Paths::new(Path::new("/fake/path"))) .copy_for_work(Access::All, Access::All) } diff --git a/fontir/src/lib.rs b/fontir/src/lib.rs index 8ef8a3dd6..a9d2dbe8d 100644 --- a/fontir/src/lib.rs +++ b/fontir/src/lib.rs @@ -5,7 +5,5 @@ pub mod glyph; pub mod ir; pub mod orchestration; pub mod paths; -pub(crate) mod serde; pub mod source; -pub mod stateset; pub mod variations; diff --git a/fontir/src/orchestration.rs b/fontir/src/orchestration.rs index ad7996938..23d5df173 100644 --- 
a/fontir/src/orchestration.rs +++ b/fontir/src/orchestration.rs @@ -9,7 +9,7 @@ use std::{ sync::Arc, }; -use crate::{error::Error, ir, paths::Paths, source::Input}; +use crate::{error::Error, ir, paths::Paths}; use bitflags::bitflags; use fontdrasil::{ coords::NormalizedLocation, @@ -301,7 +301,6 @@ pub enum WorkId { /// Build potentially variable font-wide metrics. GlobalMetrics, Glyph(GlyphName), - GlyphIrDelete(GlyphName), /// Glyph order from source, prior to adjustment /// /// Typically whatever2ir would populate this and then @@ -330,7 +329,6 @@ impl Identifier for WorkId { WorkId::StaticMetadata => "IrStaticMetadata", WorkId::GlobalMetrics => "IrGlobalMetrics", WorkId::Glyph(..) => "IrGlyph", - WorkId::GlyphIrDelete(..) => "IrGlyphDelete", WorkId::PreliminaryGlyphOrder => "IrPreliminaryGlyphOrder", WorkId::GlyphOrder => "IrGlyphOrder", WorkId::Features => "IrFeatures", @@ -402,10 +400,6 @@ pub struct Context { pub(crate) persistent_storage: Arc, - // The input we're working on. Note that change detection may mean we only process - // a subset of the full input. 
- pub input: Arc, - // work results we've completed or restored from disk // We create individual caches so we can return typed results from get fns pub static_metadata: FeContextItem, @@ -430,7 +424,6 @@ impl Context { Context { flags: self.flags, persistent_storage: self.persistent_storage.clone(), - input: self.input.clone(), static_metadata: self.static_metadata.clone_with_acl(acl.clone()), preliminary_glyph_order: self.preliminary_glyph_order.clone_with_acl(acl.clone()), glyph_order: self.glyph_order.clone_with_acl(acl.clone()), @@ -443,7 +436,7 @@ impl Context { } } - pub fn new_root(flags: Flags, paths: Paths, input: Input) -> Context { + pub fn new_root(flags: Flags, paths: Paths) -> Context { let acl = Arc::from(AccessControlList::read_only()); let persistent_storage = Arc::from(IrPersistentStorage { active: flags.contains(Flags::EMIT_IR), @@ -452,7 +445,6 @@ impl Context { Context { flags, persistent_storage: persistent_storage.clone(), - input: Arc::from(input), static_metadata: ContextItem::new( WorkId::StaticMetadata, acl.clone(), diff --git a/fontir/src/paths.rs b/fontir/src/paths.rs index 044e34c00..a36ae2def 100644 --- a/fontir/src/paths.rs +++ b/fontir/src/paths.rs @@ -71,9 +71,6 @@ impl Paths { WorkId::GlyphOrder => self.build_dir.join("glyph_order.yml"), WorkId::GlobalMetrics => self.build_dir.join("global_metrics.yml"), WorkId::Glyph(name) => self.glyph_ir_file(name.as_str()), - WorkId::GlyphIrDelete(name) => { - self.build_dir.join(format!("delete-{}.yml", name.as_str())) - } WorkId::Features => self.build_dir.join("features.yml"), WorkId::KerningGroups => self.build_dir.join("kern_groups.yml"), WorkId::KernInstance(location) => self.kern_ir_file(location), diff --git a/fontir/src/serde.rs b/fontir/src/serde.rs deleted file mode 100644 index 0f2c607ab..000000000 --- a/fontir/src/serde.rs +++ /dev/null @@ -1,99 +0,0 @@ -use std::{collections::HashMap, path::PathBuf}; - -use filetime::FileTime; -use serde::{Deserialize, Serialize}; - -use 
crate::stateset::{FileState, MemoryState, State, StateIdentifier, StateSet}; - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub(crate) struct StateSetSerdeRepr { - files: Vec, - slices: Vec, -} - -impl From for StateSet { - fn from(from: StateSetSerdeRepr) -> Self { - let entries: HashMap = from - .files - .into_iter() - .map(|serde_repr| { - ( - StateIdentifier::File(PathBuf::from(&serde_repr.path)), - State::File(FileState { - mtime: FileTime::from_unix_time(serde_repr.unix_seconds, serde_repr.nanos), - size: serde_repr.size, - }), - ) - }) - .chain(from.slices.into_iter().map(|serde_repr| { - ( - StateIdentifier::Memory(serde_repr.identifier), - State::Memory(MemoryState { - hash: blake3::Hash::from_hex(serde_repr.hash).unwrap(), - size: serde_repr.size, - }), - ) - })) - .collect(); - StateSet { entries } - } -} - -impl From for StateSetSerdeRepr { - fn from(fs: StateSet) -> Self { - let mut files = Vec::new(); - let mut slices = Vec::new(); - - for (key, state) in fs.entries { - match state { - State::File(state) => { - let StateIdentifier::File(path) = key else { - panic!("A file state *must* use a file key"); - }; - files.push(FileStateSerdeRepr { - path: path.to_str().expect("Only UTF names please").to_string(), - unix_seconds: state.mtime.unix_seconds(), - nanos: state.mtime.nanoseconds(), - size: state.size, - }); - } - State::Memory(state) => { - let StateIdentifier::Memory(identifier) = key else { - panic!("A file state *must* use a file key"); - }; - slices.push(SliceStateSerdeRepr { - identifier, - hash: state.hash.to_hex().to_string(), - size: state.size, - }); - } - } - } - files.sort_by(|e1, e2| e1.path.cmp(&e2.path)); - slices.sort_by(|e1, e2| e1.identifier.cmp(&e2.identifier)); - StateSetSerdeRepr { files, slices } - } -} - -/// The serde-friendly representation of a [FileState]. -/// -/// SystemTime lacks a platform independent representation we can -/// depend on so use FileTime's unix_seconds,nanos. 
-#[derive(Serialize, Deserialize, Debug, Clone)] -struct FileStateSerdeRepr { - path: String, - unix_seconds: i64, - nanos: u32, - size: u64, -} - -/// The serde-friendly representation of a [MemoryState]. -/// -/// SystemTime lacks a platform independent representation we can -/// depend on so use FileTime's unix_seconds,nanos. -#[derive(Serialize, Deserialize, Debug, Clone)] -struct SliceStateSerdeRepr { - identifier: String, - hash: String, - size: u64, -} diff --git a/fontir/src/source.rs b/fontir/src/source.rs index 947b7f7fd..d59a4a683 100644 --- a/fontir/src/source.rs +++ b/fontir/src/source.rs @@ -1,62 +1,29 @@ //! Generic model of font sources. -use std::{collections::HashMap, fs}; +use std::path::Path; -use indexmap::IndexSet; -use log::debug; -use serde::{Deserialize, Serialize}; +use fontdrasil::coords::NormalizedLocation; -use fontdrasil::{coords::NormalizedLocation, orchestration::Work, types::GlyphName}; +use crate::{error::Error, orchestration::IrWork}; -use crate::{ - error::Error, - orchestration::{Context, IrWork, WorkId}, - stateset::StateSet, -}; - -/// Destroy a file, such as the IR for a deleted glyph -#[derive(Debug)] -pub struct DeleteWork { - glyph_name: GlyphName, -} - -impl DeleteWork { - pub fn create(glyph_name: GlyphName) -> Box { - Box::new(DeleteWork { glyph_name }) - } -} - -impl Work for DeleteWork { - fn id(&self) -> WorkId { - WorkId::GlyphIrDelete(self.glyph_name.clone()) - } - - fn exec(&self, context: &Context) -> Result<(), Error> { - let path = context.persistent_storage.paths.target_file(&self.id()); - debug!("Delete {:#?}", path); - if path.exists() { - fs::remove_file(&path).map_err(|source| Error::DeleteFailed { path, source })? - } - Ok(()) - } -} - -/// Manipulations on some sort of font source. +/// A source of data from which one could compile a font. +/// +/// Expected to be implemented once per font format, e.g. one for .glyphs, one for ufo+ds, etc. 
pub trait Source { - /// Resolve a source to a set of files and their dependencies. - /// - /// Mut to permit caching. - fn inputs(&mut self) -> Result; + /// path is to the root entry, e.g. .glyphs file, .designspace, etc + fn new(root: &Path) -> Result + where + Self: Sized; /// Create a function that could be called to generate [crate::ir::StaticMetadata]. /// - /// When run work should update [Context] with new [crate::ir::StaticMetadata]. - fn create_static_metadata_work(&self, input: &Input) -> Result, Error>; + /// When run work should update [crate::orchestration::Context] with new [crate::ir::StaticMetadata]. + fn create_static_metadata_work(&self) -> Result, Error>; /// Create a function that could be called to generate [crate::ir::StaticMetadata]. /// - /// When run work should update [Context] with new [crate::ir::GlobalMetrics]. - fn create_global_metric_work(&self, input: &Input) -> Result, Error>; + /// When run work should update[crate::orchestration::Context] with new [crate::ir::GlobalMetrics]. + fn create_global_metric_work(&self) -> Result, Error>; /// Create a function that could be called to generate IR for glyphs. /// @@ -64,123 +31,23 @@ pub trait Source { /// Expected to return a Vec aligned with the glyph_names input. That is, /// result vec nth entry is the work for the nth glyph name. /// - /// When run work should update [Context] with [crate::ir::Glyph] and [crate::ir::Anchor] + /// When run work should update [crate::orchestration::Context] with [crate::ir::Glyph] and [crate::ir::Anchor] /// for the glyph name. - fn create_glyph_ir_work( - &self, - glyph_names: &IndexSet, - input: &Input, - ) -> Result>, Error>; + fn create_glyph_ir_work(&self) -> Result>, Error>; /// Create a function that could be called to generate or identify fea file(s). /// - /// When run work should update [Context] with [crate::ir::FeaturesSource]. 
- fn create_feature_ir_work(&self, input: &Input) -> Result, Error>; + /// When run work should update [crate::orchestration::Context] with [crate::ir::FeaturesSource]. + fn create_feature_ir_work(&self) -> Result, Error>; /// Create a function that could be called to produce kerning groups. /// - /// When run work should update [Context] with [crate::ir::KerningGroups]. - fn create_kerning_group_ir_work(&self, input: &Input) -> Result, Error>; + /// When run work should update [crate::orchestration::Context] with [crate::ir::KerningGroups]. + fn create_kerning_group_ir_work(&self) -> Result, Error>; /// Create a function that could be called to generate or identify kerning for a location. /// - /// When run work should update [Context] with [crate::ir::KerningInstance]. - fn create_kerning_instance_ir_work( - &self, - input: &Input, - at: NormalizedLocation, - ) -> Result, Error>; -} - -/// The files (in future non-file sources?) that drive various parts of IR -#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq)] -pub struct Input { - /// Font-wide metadata, such as upem. Things that should trigger a non-incremental build if they change. - /// Files that contribute to [crate::ir::StaticMetadata]. - pub static_metadata: StateSet, - - /// Font-wide metrics, such as ascender. - /// Files that contribute to [crate::ir::GlobalMetrics]. 
- pub global_metrics: StateSet, - - /// The input(s) that inform glyph IR construction, grouped by gyph name - pub glyphs: HashMap, - - /// The input(s) that inform feature IR construction - pub features: StateSet, -} - -impl Input { - pub fn new() -> Input { - Default::default() - } -} - -#[cfg(test)] -mod tests { - use std::{ - collections::HashMap, - fs, - path::{Path, PathBuf}, - }; - - use tempfile::{tempdir, TempDir}; - - use crate::stateset::StateSet; - - use super::Input; - - fn write(temp_dir: &TempDir, path: &Path, content: &str) -> PathBuf { - let path = temp_dir.path().join(path); - fs::write(&path, content).unwrap(); - path - } - - fn create_test_input(temp_dir: &TempDir) -> Input { - let mut font_info = StateSet::new(); - font_info - .track_file(&write(temp_dir, Path::new("some.designspace"), "blah")) - .unwrap(); - - let mut glyph = StateSet::new(); - glyph - .track_file(&write(temp_dir, Path::new("regular.space.glif"), "blah")) - .unwrap(); - glyph - .track_file(&write(temp_dir, Path::new("bold.space.glif"), "blah")) - .unwrap(); - - let mut glyphs = HashMap::new(); - glyphs.insert("space".into(), glyph); - - let mut features = StateSet::new(); - features - .track_file(&write(temp_dir, Path::new("features.fea"), "blah")) - .unwrap(); - - Input { - static_metadata: font_info.clone(), - global_metrics: font_info, - glyphs, - features, - } - } - - #[test] - fn read_write_yaml() { - let temp_dir = tempdir().unwrap(); - let ir_input = create_test_input(&temp_dir); - let yml = serde_yaml::to_string(&ir_input).unwrap(); - let restored: Input = serde_yaml::from_str(&yml).unwrap(); - assert_eq!(ir_input, restored); - } - - #[test] - fn read_write_bincode() { - let temp_dir = tempdir().unwrap(); - let ir_input = create_test_input(&temp_dir); - let bc = bincode::serialize(&ir_input).unwrap(); - let restored: Input = bincode::deserialize(&bc).unwrap(); - assert_eq!(ir_input, restored); - } + /// When run work should update [crate::orchestration::Context] with 
[crate::ir::KerningInstance]. + fn create_kerning_instance_ir_work(&self, at: NormalizedLocation) + -> Result, Error>; } diff --git a/fontir/src/stateset.rs b/fontir/src/stateset.rs deleted file mode 100644 index 02e2a4825..000000000 --- a/fontir/src/stateset.rs +++ /dev/null @@ -1,458 +0,0 @@ -use crate::{error::TrackFileError, serde::StateSetSerdeRepr}; -use filetime::FileTime; -use serde::{Deserialize, Serialize}; - -use std::{ - collections::{hash_map, HashMap, HashSet}, - fs, - hash::{Hash, Hasher}, - io, - path::{Path, PathBuf}, - vec, -}; - -/// Helps to identify changes in a set of stateful things. -#[derive(Serialize, Deserialize, Debug, Default, Clone, PartialEq)] -#[serde(from = "StateSetSerdeRepr", into = "StateSetSerdeRepr")] -pub struct StateSet { - pub(crate) entries: HashMap, -} - -// The key for a stateful thing of some specific type -#[derive(Debug, Clone, Hash, PartialEq, Eq)] -pub enum StateIdentifier { - File(PathBuf), - Memory(String), -} - -// A stateful thing of some specific type -#[derive(Debug, Clone, PartialEq)] -pub(crate) enum State { - File(FileState), - Memory(MemoryState), -} - -#[derive(Debug, Copy, Clone, PartialEq)] -pub(crate) struct FileState { - pub(crate) mtime: FileTime, - pub(crate) size: u64, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct MemoryState { - pub(crate) hash: blake3::Hash, - pub(crate) size: u64, -} - -impl FileState { - fn of(path: &Path) -> Result { - let metadata = path.metadata().map_err(|e| TrackFileError::new(path, e))?; - let mtime = metadata - .modified() - .map_err(|e| TrackFileError::new(path, e)) - .map(FileTime::from_system_time)?; - Ok(FileState { - mtime, - size: metadata.len(), - }) - } -} - -impl MemoryState { - fn of(thing: impl Hash) -> Result { - let mut hasher = Blake3Hasher::new(); - thing.hash(&mut hasher); - let hash = hasher.b3.finalize(); - Ok(MemoryState { - hash, - size: hasher.b3.count(), - }) - } -} - -// blake3 doesn't implement std Hasher which makes hashing an impl Hash 
difficult -struct Blake3Hasher { - b3: blake3::Hasher, -} - -impl Blake3Hasher { - fn new() -> Blake3Hasher { - Blake3Hasher { - b3: blake3::Hasher::new(), - } - } -} - -impl Hasher for Blake3Hasher { - fn finish(&self) -> u64 { - panic!("Use .b3.finalize() instead"); - } - - fn write(&mut self, bytes: &[u8]) { - self.b3.update(bytes); - } -} - -impl StateSet { - pub fn new() -> StateSet { - Default::default() - } - - pub fn keys(&self) -> KeyIterator { - KeyIterator { - iter: self.entries.keys(), - } - } - - pub fn is_empty(&self) -> bool { - self.entries.is_empty() - } - - pub fn contains(&self, path: &Path) -> bool { - self.entries - .contains_key(&StateIdentifier::File(path.to_path_buf())) - } - - /// Pay attention to path, we'd like to know if it changes. - pub fn track_file(&mut self, path: &Path) -> Result<(), TrackFileError> { - self.entries.insert( - StateIdentifier::File(path.to_path_buf()), - State::File(FileState::of(path)?), - ); - - // For a dir to register unchanged we need to add it's current contents - if path.is_dir() { - let mut dirs_visited = HashSet::new(); - for new_path in self.new_files(&mut dirs_visited, path) { - self.entries.insert( - StateIdentifier::File(new_path.clone()), - State::File(FileState::of(&new_path)?), - ); - } - } - Ok(()) - } - - /// Pay attention to this hashable thing, we'd quite like to know if it changes - /// - /// Identifier can be whatever you like. For file fragments a path-like construct - /// is suggested, e.g. myfile.glyphs/glyphs/layer/glyph perhaps. 
- pub fn track_memory( - &mut self, - identifier: String, - memory: &(impl Hash + ?Sized), - ) -> Result<(), TrackFileError> { - self.entries.insert( - StateIdentifier::Memory(identifier), - State::Memory(MemoryState::of(memory)?), - ); - Ok(()) - } - - fn new_files(&self, dirs_visited: &mut HashSet, dir: &Path) -> Vec { - assert!(dir.is_dir()); - if dirs_visited.contains(dir) { - return Vec::new(); - } - - let mut frontier = vec![dir.to_owned()]; - let mut results: Vec = Vec::new(); - - while let Some(dir) = frontier.pop() { - let dir_entries = fs::read_dir(&dir).expect("Unable to iterate directory"); - for dir_entry in dir_entries { - let dir_entry = dir_entry.expect("Cannot read dir entry").path(); - - // Queue subdir for processing. Bravely assume lack of cycles. - if dir_entry.is_dir() && !dirs_visited.contains(&dir_entry) { - frontier.push(dir_entry.to_owned()); - } - - // A file we've never seen before?! - if !dir_entry.is_dir() - && !self - .entries - .contains_key(&StateIdentifier::File(dir_entry.clone())) - { - results.push(dir_entry); - } - } - dirs_visited.insert(dir.to_owned()); - } - results - } - - /// We're what's new, Self - old_state; what's changed? - pub fn diff(&self, old_state: &StateSet) -> Result { - let old_keys: HashSet<&StateIdentifier> = old_state.entries.keys().collect(); - let new_keys: HashSet<&StateIdentifier> = self.entries.keys().collect(); - - let added: HashSet<&StateIdentifier> = new_keys.difference(&old_keys).cloned().collect(); - - Ok(StateDiff { - added: added.iter().map(|e| (*e).clone()).collect(), - updated: new_keys - .intersection(&old_keys) - .filter(|e| old_state.entries.get(e).unwrap() != self.entries.get(e).unwrap()) - .filter(|e| !added.contains(*e)) - .map(|e| (*e).clone()) - .collect(), - removed: old_keys - .difference(&new_keys) - .map(|e| (*e).clone()) - .collect(), - }) - } - // For tests. 
- #[doc(hidden)] - pub fn set_file_state(&mut self, path: &Path, mtime: FileTime, size: u64) { - self.entries.insert( - StateIdentifier::File(path.to_path_buf()), - State::File(FileState { mtime, size }), - ); - } -} - -pub struct KeyIterator<'a> { - iter: hash_map::Keys<'a, StateIdentifier, State>, -} - -impl<'a> Iterator for KeyIterator<'a> { - type Item = &'a StateIdentifier; - - fn next(&mut self) -> Option { - self.iter.next() - } -} - -#[derive(Debug, Default, PartialEq, Eq)] -pub struct StateDiff { - pub added: HashSet, - pub updated: HashSet, - pub removed: HashSet, -} - -impl StateDiff { - pub fn new() -> StateDiff { - StateDiff { - added: HashSet::new(), - updated: HashSet::new(), - removed: HashSet::new(), - } - } -} - -#[cfg(test)] -mod tests { - use std::{ - collections::HashSet, - fs, - ops::Add, - path::{Path, PathBuf}, - time::Duration, - }; - - use filetime::set_file_mtime; - use tempfile::{tempdir, TempDir}; - - use crate::error::TrackFileError; - - use super::{StateDiff, StateIdentifier, StateSet}; - - fn assert_no_file_changes(fs: &StateSet) { - assert_eq!( - StateDiff::new(), - update_file_entries(fs).unwrap().diff(fs).unwrap(), - ) - } - - #[test] - fn detect_memory_change() { - let p1 = String::from("/glyphs/space"); - let p2 = String::from("/glyphs/hyphen"); - - let mut s1 = StateSet::new(); - s1.track_memory(p1.clone(), "this is a glyph").unwrap(); - s1.track_memory(p2, "another glyph").unwrap(); - - let mut s2 = s1.clone(); - s2.track_memory(p1.clone(), "this changes everything") - .unwrap(); - - assert_eq!( - StateDiff { - updated: HashSet::from([StateIdentifier::Memory(p1)]), - ..Default::default() - }, - s2.diff(&s1).unwrap(), - ) - } - - #[test] - fn detect_file_change() { - let temp_dir = tempdir().unwrap(); - - let file = temp_dir.path().join("a"); - fs::write(&file, "eh").unwrap(); - - let mut fs = StateSet::new(); - fs.track_file(&file).unwrap(); - - assert_no_file_changes(&fs); - - // Detect changed size - fs::write(&file, 
"whoa").unwrap(); - let updated = update_file_entries(&fs).unwrap(); - let diff = updated.diff(&fs).unwrap(); - assert_eq!( - StateDiff { - updated: HashSet::from([StateIdentifier::File(file.to_owned())]), - ..Default::default() - }, - diff - ); - assert_no_file_changes(&updated); - - // Detect changed mtime - let new_mtime = file - .metadata() - .unwrap() - .modified() - .unwrap() - .add(Duration::from_secs(1)); - set_file_mtime(&file, new_mtime.into()).unwrap(); - let updated = update_file_entries(&fs).unwrap(); - let diff = updated.diff(&fs).unwrap(); - assert_eq!( - StateDiff { - updated: HashSet::from([StateIdentifier::File(file)]), - ..Default::default() - }, - diff - ); - assert_no_file_changes(&updated); - } - - #[test] - fn detect_dir_change() { - let temp_dir = tempdir().unwrap(); - - let subdir = temp_dir.path().join("glif"); - fs::create_dir(&subdir).unwrap(); - - let unchanged = temp_dir.path().join("fileA"); - let modified = subdir.join("fileB"); - let removed = subdir.join("fileC"); - fs::write(unchanged, "eh").unwrap(); - fs::write(&modified, "eh").unwrap(); - fs::write(&removed, "eh").unwrap(); - - // Track the parent dir from it's current state - let mut fs = StateSet::new(); - fs.track_file(temp_dir.path()).unwrap(); - - // Notably, we should NOT report the files temp dir as changed - // because we added the entire directory as unchanged and nothing - // in it has changed since then - assert_no_file_changes(&fs); - - // If we change or add files in the tracked dir that should count - let added = subdir.join("fileD"); - fs::write(&modified, "eh+").unwrap(); - fs::write(&added, "eh").unwrap(); - fs::remove_file(&removed).unwrap(); - - let diff = update_file_entries(&fs).unwrap().diff(&fs).unwrap(); - assert_eq!( - StateDiff { - added: HashSet::from([StateIdentifier::File(added)]), - updated: HashSet::from([StateIdentifier::File(modified)]), - removed: HashSet::from([StateIdentifier::File(removed)]), - }, - diff - ); - } - - fn write(temp_dir: 
&TempDir, path: &Path, content: &str) -> PathBuf { - let path = temp_dir.path().join(path); - fs::write(&path, content).unwrap(); - path - } - - fn one_changed_file_one_not(temp_dir: &TempDir) -> (PathBuf, PathBuf, StateSet) { - let unchanged = write(temp_dir, Path::new("a"), "eh"); - let changed = write(temp_dir, Path::new("b"), "todo:change"); - - let mut fs = StateSet::new(); - fs.track_file(&unchanged).unwrap(); - fs.track_file(&changed).unwrap(); - write(temp_dir, &changed, "eh"); - - assert!(fs.contains(&changed)); - assert!(fs.contains(&unchanged)); - - (changed, unchanged, fs) - } - - fn update_file_entries(s: &StateSet) -> Result { - let mut state = StateSet::new(); - for key in s.keys() { - if let StateIdentifier::File(path) = key { - if !path.exists() { - continue; - } - state.track_file(path)?; - } - } - Ok(state) - } - - #[test] - fn file_diff() { - let temp_dir = tempdir().unwrap(); - let (changed, unchanged, fs) = one_changed_file_one_not(&temp_dir); - - // Nothing changed - assert_eq!(StateDiff::new(), fs.diff(&fs).unwrap()); - - // Some stuff changed! 
- let added = write(&temp_dir, Path::new("new"), "meh"); - fs::remove_file(&unchanged).unwrap(); - let mut fs2 = update_file_entries(&fs).unwrap(); - fs2.track_file(&added).unwrap(); - assert_eq!( - StateDiff { - added: [StateIdentifier::File(added)].into(), - updated: [StateIdentifier::File(changed)].into(), - removed: [StateIdentifier::File(unchanged)].into(), - }, - fs2.diff(&fs).unwrap() - ); - } - - #[test] - fn read_write_yaml() { - let temp_dir = tempdir().unwrap(); - - let (_, _, mut fs) = one_changed_file_one_not(&temp_dir); - fs.track_memory("/glyph/glyph_name".to_string(), "Hi World!") - .unwrap(); - - let yml = serde_yaml::to_string(&fs).unwrap(); - let restored: StateSet = serde_yaml::from_str(&yml).expect(&yml); - assert_eq!(fs, restored); - } - - #[test] - fn read_write_bincode() { - let temp_dir = tempdir().unwrap(); - - let (_, _, mut fs) = one_changed_file_one_not(&temp_dir); - fs.track_memory("/glyph/glyph_name".to_string(), "Hi World!") - .unwrap(); - - let bc = bincode::serialize(&fs).unwrap(); - let restored: StateSet = bincode::deserialize(&bc).unwrap(); - assert_eq!(fs, restored); - } -} diff --git a/fontra2fontir/src/source.rs b/fontra2fontir/src/source.rs index 4452599cc..c3935ee18 100644 --- a/fontra2fontir/src/source.rs +++ b/fontra2fontir/src/source.rs @@ -1,5 +1,5 @@ use std::{ - collections::{BTreeMap, HashMap}, + collections::BTreeMap, fs::File, io::{BufRead, BufReader}, path::{Path, PathBuf}, @@ -11,8 +11,7 @@ use fontir::{ error::{BadSource, BadSourceKind, Error}, ir::StaticMetadata, orchestration::{Context, WorkId}, - source::{Input, Source}, - stateset::StateSet, + source::Source, }; use log::debug; @@ -23,142 +22,95 @@ use crate::{ pub struct FontraIrSource { fontdata_file: PathBuf, - glyphinfo_file: PathBuf, - glyph_dir: PathBuf, glyph_info: Arc)>>, } -impl FontraIrSource { - pub fn new(fontra_dir: PathBuf) -> Result { - let fontdata_file = fontra_dir.join("font-data.json"); - if !fontdata_file.is_file() { - return 
Err(BadSource::new(fontdata_file, BadSourceKind::ExpectedFile).into()); - } - let glyphinfo_file = fontra_dir.join("glyph-info.csv"); - if !glyphinfo_file.is_file() { - return Err(BadSource::new(glyphinfo_file, BadSourceKind::ExpectedFile).into()); - } - let glyph_dir = fontra_dir.join("glyphs"); - if !glyph_dir.is_dir() { - return Err(BadSource::new(glyph_dir, BadSourceKind::ExpectedDirectory).into()); - } - Ok(FontraIrSource { - fontdata_file, - glyphinfo_file, - glyph_dir, - glyph_info: Default::default(), - }) +fn parse_glyph_info(fontra_dir: &Path) -> Result)>, Error> { + let glyphinfo_file = fontra_dir.join("glyph-info.csv"); + if !glyphinfo_file.is_file() { + return Err(BadSource::new(glyphinfo_file, BadSourceKind::ExpectedFile).into()); } - // When things like upem may have changed forget incremental and rebuild the whole thing - fn static_metadata_state(&self) -> Result { - let mut font_info = StateSet::new(); - font_info.track_file(&self.fontdata_file)?; - font_info.track_file(&self.glyphinfo_file)?; - Ok(font_info) + // Read the glyph-info file + let file = File::open(&glyphinfo_file).map_err(|e| BadSource::new(&glyphinfo_file, e))?; + + let glyph_dir = fontra_dir.join("glyphs"); + if !glyph_dir.is_dir() { + return Err(BadSource::new(glyph_dir, BadSourceKind::ExpectedDirectory).into()); } - fn load_glyphinfo(&mut self) -> Result<(), BadSource> { - if !self.glyph_info.is_empty() { - return Ok(()); + // Example files suggest the first line is just the column headers. Hopefully always :) + // This file is tool generated so it shouldn't be full of human error. Fail if we don't understand. 
+ let mut glyph_info = BTreeMap::default(); + for (i, line) in BufReader::new(file).lines().enumerate().skip(1) { + let line = line.map_err(|e| BadSource::new(&glyphinfo_file, BadSourceKind::Io(e)))?; + let parts: Vec<_> = line.split(';').collect(); + if parts.len() != 2 { + return Err(BadSource::custom( + &glyphinfo_file, + format!("Expected two parts in line {i} separated by ;"), + ) + .into()); } - - // Read the glyph-info file - let file = File::open(&self.glyphinfo_file) - .map_err(|e| BadSource::new(&self.glyphinfo_file, e))?; - - // Example files suggest the first line is just the column headers. Hopefully always :) - // This file is tool generated so it shouldn't be full of human error. Fail if we don't understand. - let mut glyph_info = BTreeMap::default(); - for (i, line) in BufReader::new(file).lines().enumerate().skip(1) { - let line = - line.map_err(|e| BadSource::new(&self.glyphinfo_file, BadSourceKind::Io(e)))?; - let parts: Vec<_> = line.split(';').collect(); - if parts.len() != 2 { - return Err(BadSource::custom( - &self.glyphinfo_file, - format!("Expected two parts in line {i} separated by ;"), - )); - } - let glyph_name = GlyphName::new(parts[0].trim()); - let codepoints = parts[1] - .split(',') - .filter_map(|codepoint| { - let codepoint = codepoint.trim(); - if codepoint.is_empty() { - return None; - } - let Some(codepoint) = codepoint.strip_prefix("U+") else { - return Some(Err(BadSource::custom( - &self.glyphinfo_file, - format!("Unintelligible codepoint {codepoint:?} at line {i}"), - ))); - }; - Some(u32::from_str_radix(codepoint, 16).map_err(|e| { - BadSource::custom( - &self.glyphinfo_file, - format!("Unintelligible codepoint {codepoint:?} at line {i}: {e}"), - ) - })) - }) - .collect::, _>>()?; - let glyph_file = fontra::glyph_file(&self.glyph_dir, glyph_name.clone()); - if !glyph_file.is_file() { - return Err(BadSource::new(glyph_file, BadSourceKind::ExpectedFile)); - } - - if glyph_info - .insert(glyph_name.clone(), (glyph_file, 
codepoints)) - .is_some() - { - return Err(BadSource::custom( - &self.glyphinfo_file, - format!("Multiple definitions of '{glyph_name}'"), - )); - } + let glyph_name = GlyphName::new(parts[0].trim()); + let codepoints = parts[1] + .split(',') + .filter_map(|codepoint| { + let codepoint = codepoint.trim(); + if codepoint.is_empty() { + return None; + } + let Some(codepoint) = codepoint.strip_prefix("U+") else { + return Some(Err(BadSource::custom( + &glyphinfo_file, + format!("Unintelligible codepoint {codepoint:?} at line {i}"), + ))); + }; + Some(u32::from_str_radix(codepoint, 16).map_err(|e| { + BadSource::custom( + &glyphinfo_file, + format!("Unintelligible codepoint {codepoint:?} at line {i}: {e}"), + ) + })) + }) + .collect::, _>>()?; + let glyph_file = fontra::glyph_file(&glyph_dir, glyph_name.clone()); + if !glyph_file.is_file() { + return Err(BadSource::new(glyph_file, BadSourceKind::ExpectedFile).into()); } - self.glyph_info = Arc::new(glyph_info); - Ok(()) - } - fn glyph_state(&mut self) -> Result, Error> { - let mut glyph_state = HashMap::default(); - for (glyph_name, (glyph_file, _)) in self.glyph_info.iter() { - let mut tracker = StateSet::new(); - tracker.track_file(glyph_file)?; - if glyph_state.insert(glyph_name.clone(), tracker).is_some() { - Err(BadSource::custom( - &self.glyphinfo_file, - format!("Multiple definitions of '{glyph_name}'"), - ))?; - } + if glyph_info + .insert(glyph_name.clone(), (glyph_file, codepoints)) + .is_some() + { + return Err(BadSource::custom( + &glyphinfo_file, + format!("Multiple definitions of '{glyph_name}'"), + ) + .into()); } - Ok(glyph_state) } + + Ok(glyph_info) } impl Source for FontraIrSource { - fn inputs(&mut self) -> Result { - let static_metadata = self.static_metadata_state()?; - self.load_glyphinfo()?; - let glyphs = self.glyph_state()?; + fn new(fontra_dir: &Path) -> Result { + let fontdata_file = fontra_dir.join("font-data.json"); + if !fontdata_file.is_file() { + return Err(BadSource::new(fontdata_file, 
BadSourceKind::ExpectedFile).into()); + } - // Not sure how fontra features work - let features = StateSet::new(); + let glyph_info = parse_glyph_info(fontra_dir)?; - // fontinfo.plist spans static metadata and global metrics. - // Just use the same change detection for both. - Ok(Input { - static_metadata: static_metadata.clone(), - global_metrics: static_metadata, - glyphs, - features, + Ok(FontraIrSource { + fontdata_file, + glyph_info: Arc::new(glyph_info), }) } fn create_static_metadata_work( &self, - _input: &fontir::source::Input, ) -> Result, fontir::error::Error> { FontraFontData::from_file(&self.fontdata_file)?; Ok(Box::new(StaticMetadataWork { @@ -169,36 +121,30 @@ impl Source for FontraIrSource { fn create_global_metric_work( &self, - _input: &fontir::source::Input, ) -> Result, fontir::error::Error> { todo!() } fn create_glyph_ir_work( &self, - _glyph_names: &indexmap::IndexSet, - _input: &fontir::source::Input, ) -> Result>, fontir::error::Error> { todo!() } fn create_feature_ir_work( &self, - _input: &fontir::source::Input, ) -> Result, fontir::error::Error> { todo!() } fn create_kerning_group_ir_work( &self, - _input: &fontir::source::Input, ) -> Result, fontir::error::Error> { todo!() } fn create_kerning_instance_ir_work( &self, - _input: &fontir::source::Input, _at: fontdrasil::coords::NormalizedLocation, ) -> Result, fontir::error::Error> { todo!() @@ -249,9 +195,7 @@ mod tests { #[test] fn glyph_info_of_minimal() { - let mut source = FontraIrSource::new(testdata_dir().join("minimal.fontra")).unwrap(); - // populates glyph_info - source.inputs().unwrap(); + let source = FontraIrSource::new(&testdata_dir().join("minimal.fontra")).unwrap(); assert_eq!( Arc::new( vec![( @@ -270,9 +214,7 @@ mod tests { #[test] fn glyph_info_of_2glyphs() { - let mut source = FontraIrSource::new(testdata_dir().join("2glyphs.fontra")).unwrap(); - // populates glyph_info - source.inputs().unwrap(); + let source = 
FontraIrSource::new(&testdata_dir().join("2glyphs.fontra")).unwrap(); assert_eq!( Arc::new( [ @@ -300,9 +242,7 @@ mod tests { #[test] fn glyph_info_0_1_n_codepoints() { - let mut source = FontraIrSource::new(testdata_dir().join("codepoints.fontra")).unwrap(); - // populates glyph_info - source.inputs().unwrap(); + let source = FontraIrSource::new(&testdata_dir().join("codepoints.fontra")).unwrap(); assert_eq!( vec![ (".notdef", vec![]), diff --git a/glyphs2fontir/src/source.rs b/glyphs2fontir/src/source.rs index 55884b3f6..f8fe6c110 100644 --- a/glyphs2fontir/src/source.rs +++ b/glyphs2fontir/src/source.rs @@ -1,12 +1,11 @@ use std::{ collections::{BTreeMap, BTreeSet, HashMap, HashSet}, - path::PathBuf, + path::Path, str::FromStr, sync::Arc, }; use chrono::DateTime; -use indexmap::IndexSet; use log::{debug, trace, warn}; use fontdrasil::{ @@ -22,12 +21,11 @@ use fontir::{ NameBuilder, NameKey, NamedInstance, StaticMetadata, DEFAULT_VENDOR_ID, }, orchestration::{Context, IrWork, WorkId}, - source::{Input, Source}, - stateset::StateSet, + source::Source, }; use glyphs_reader::{ glyphdata::{Category, Subcategory}, - CustomParameters, Font, InstanceType, + Font, InstanceType, }; use ordered_float::OrderedFloat; use smol_str::SmolStr; @@ -38,119 +36,13 @@ use write_fonts::{ use crate::toir::{design_location, to_ir_contours_and_components, to_ir_features, FontInfo}; +#[derive(Debug, Clone)] pub struct GlyphsIrSource { - glyphs_file: PathBuf, - cache: Option, -} - -struct Cache { - global_metadata: StateSet, + glyph_names: Arc>, font_info: Arc, } -impl Cache { - fn is_valid_for(&self, global_metadata: &StateSet) -> bool { - self.global_metadata == *global_metadata - } -} - -fn glyph_identifier(glyph_name: &str) -> String { - format!("/glyph/{glyph_name}") -} - -fn glyph_states(font: &Font) -> Result, Error> { - let mut glyph_states = HashMap::new(); - - for (glyphname, glyph) in font.glyphs.iter() { - let mut state = StateSet::new(); - 
state.track_memory(glyph_identifier(glyphname), glyph)?; - glyph_states.insert(glyphname.as_str().into(), state); - } - - Ok(glyph_states) -} - impl GlyphsIrSource { - pub fn new(glyphs_file: PathBuf) -> GlyphsIrSource { - GlyphsIrSource { - glyphs_file, - cache: None, - } - } - fn feature_inputs(&self, font: &Font) -> Result { - let mut state = StateSet::new(); - state.track_memory("/features".to_string(), &font.features)?; - Ok(state) - } - - // When things like upem may have changed forget incremental and rebuild the whole thing - fn static_metadata_inputs(&self, font: &Font) -> Result { - let mut state = StateSet::new(); - // Wipe out glyph-related fields, track the rest - // Explicitly field by field so if we add more compiler will force us to update here - let font = Font { - units_per_em: font.units_per_em, - axes: font.axes.clone(), - masters: font.masters.clone(), - default_master_idx: font.default_master_idx, - axis_mappings: font.axis_mappings.clone(), - instances: font.instances.clone(), - date: None, - custom_parameters: font.custom_parameters.clone(), - ..Default::default() - }; - state.track_memory("/font_master".to_string(), &font)?; - Ok(state) - } - - // Things that could change global metrics. - fn global_metric_inputs(&self, font: &Font) -> Result { - let mut state = StateSet::new(); - // Wipe out fields that can't impact global metrics. 
- // Explicitly field by field so if we add more compiler will force us to update here - let font = Font { - units_per_em: font.units_per_em, - axes: font.axes.clone(), - masters: font.masters.clone(), - default_master_idx: font.default_master_idx, - glyphs: Default::default(), - glyph_order: Default::default(), - axis_mappings: Default::default(), - virtual_masters: Default::default(), - features: Default::default(), - names: Default::default(), - instances: font.instances.clone(), - version_major: Default::default(), - version_minor: Default::default(), - date: None, - kerning_ltr: font.kerning_ltr.clone(), - custom_parameters: CustomParameters { - unicode_range_bits: None, - codepage_range_bits: None, - panose: None, - fs_type: None, - has_wws_names: None, - ..font.custom_parameters.clone() - }, - }; - state.track_memory("/font_master".to_string(), &font)?; - Ok(state) - } - - fn check_static_metadata(&self, global_metadata: &StateSet) -> Result<(), Error> { - // Do we have a plist cache? 
- // TODO: consider just recomputing here instead of failing - if !self - .cache - .as_ref() - .map(|pc| pc.is_valid_for(global_metadata)) - .unwrap_or(false) - { - return Err(Error::InvalidGlobalMetadata); - } - Ok(()) - } - fn create_work_for_one_glyph( &self, glyph_name: GlyphName, @@ -164,100 +56,61 @@ impl GlyphsIrSource { } impl Source for GlyphsIrSource { - fn inputs(&mut self) -> Result { + fn new(glyphs_file: &Path) -> Result { // We have to read the glyphs file then shred it to figure out if anything changed - let font_info = FontInfo::try_from(Font::load(&self.glyphs_file).map_err(|e| { + let font_info = FontInfo::try_from(Font::load(glyphs_file).map_err(|e| { BadSource::custom( - &self.glyphs_file, + glyphs_file.to_path_buf(), format!("Unable to read glyphs file: {e}"), ) })?)?; - let font = &font_info.font; - let static_metadata = self.static_metadata_inputs(font)?; - let global_metrics = self.global_metric_inputs(font)?; - let features = self.feature_inputs(font)?; - let glyphs = glyph_states(font)?; - self.cache = Some(Cache { - global_metadata: static_metadata.clone(), - font_info: Arc::new(font_info), - }); + let glyph_names = font_info + .font + .glyphs + .keys() + .map(|s| s.as_str().into()) + .collect(); - Ok(Input { - static_metadata, - global_metrics, - glyphs, - features, + Ok(Self { + glyph_names: Arc::new(glyph_names), + font_info: Arc::new(font_info), }) } - fn create_static_metadata_work(&self, input: &Input) -> Result, Error> { - self.check_static_metadata(&input.static_metadata)?; - let font_info = self.cache.as_ref().unwrap().font_info.clone(); - let glyph_names = Arc::new(input.glyphs.keys().cloned().collect()); - - Ok(Box::new(StaticMetadataWork { - font_info, - glyph_names, - })) + fn create_static_metadata_work(&self) -> Result, Error> { + Ok(Box::new(StaticMetadataWork(self.clone()))) } - fn create_global_metric_work(&self, input: &Input) -> Result, Error> { - self.check_static_metadata(&input.static_metadata)?; - let font_info = 
self.cache.as_ref().unwrap().font_info.clone(); - Ok(Box::new(GlobalMetricWork { font_info })) + fn create_global_metric_work(&self) -> Result, Error> { + Ok(Box::new(GlobalMetricWork(self.font_info.clone()))) } - fn create_glyph_ir_work( - &self, - glyph_names: &IndexSet, - input: &Input, - ) -> Result>, fontir::error::Error> { - self.check_static_metadata(&input.static_metadata)?; - - let cache = self.cache.as_ref().unwrap(); - + fn create_glyph_ir_work(&self) -> Result>, fontir::error::Error> { let mut work: Vec> = Vec::new(); - for glyph_name in glyph_names { + for glyph_name in self.glyph_names.iter() { work.push(Box::new(self.create_work_for_one_glyph( glyph_name.clone(), - cache.font_info.clone(), + self.font_info.clone(), )?)); } Ok(work) } - fn create_feature_ir_work(&self, input: &Input) -> Result, Error> { - self.check_static_metadata(&input.static_metadata)?; - - let cache = self.cache.as_ref().unwrap(); - - Ok(Box::new(FeatureWork { - font_info: cache.font_info.clone(), - })) + fn create_feature_ir_work(&self) -> Result, Error> { + Ok(Box::new(FeatureWork(self.font_info.clone()))) } - fn create_kerning_group_ir_work(&self, input: &Input) -> Result, Error> { - self.check_static_metadata(&input.static_metadata)?; - - let cache = self.cache.as_ref().unwrap(); - - Ok(Box::new(KerningGroupWork { - font_info: cache.font_info.clone(), - })) + fn create_kerning_group_ir_work(&self) -> Result, Error> { + Ok(Box::new(KerningGroupWork(self.font_info.clone()))) } fn create_kerning_instance_ir_work( &self, - input: &Input, at: NormalizedLocation, ) -> Result, Error> { - self.check_static_metadata(&input.static_metadata)?; - - let cache = self.cache.as_ref().unwrap(); - Ok(Box::new(KerningInstanceWork { - font_info: cache.font_info.clone(), + font_info: self.font_info.clone(), location: at, })) } @@ -345,10 +198,7 @@ fn names(font: &Font, flags: SelectionFlags) -> HashMap { } #[derive(Debug)] -struct StaticMetadataWork { - font_info: Arc, - glyph_names: Arc>, -} 
+struct StaticMetadataWork(GlyphsIrSource); impl Work for StaticMetadataWork { fn id(&self) -> WorkId { @@ -360,7 +210,7 @@ impl Work for StaticMetadataWork { } fn exec(&self, context: &Context) -> Result<(), Error> { - let font_info = self.font_info.as_ref(); + let font_info = self.0.font_info.as_ref(); let font = &font_info.font; debug!( "Static metadata for {}", @@ -517,7 +367,7 @@ impl Work for StaticMetadataWork { .glyph_order .iter() .map(|s| s.as_str().into()) - .filter(|gn| self.glyph_names.contains(gn)) + .filter(|gn| self.0.glyph_names.contains(gn)) .collect(); context.preliminary_glyph_order.set(glyph_order); Ok(()) @@ -579,9 +429,7 @@ fn category_for_glyph(glyph: &glyphs_reader::Glyph) -> Option { } #[derive(Debug)] -struct GlobalMetricWork { - font_info: Arc, -} +struct GlobalMetricWork(Arc); impl Work for GlobalMetricWork { fn id(&self) -> WorkId { @@ -593,7 +441,7 @@ impl Work for GlobalMetricWork { } fn exec(&self, context: &Context) -> Result<(), Error> { - let font_info = self.font_info.as_ref(); + let font_info = self.0.as_ref(); let font = &font_info.font; debug!( "Global metrics for {}", @@ -700,9 +548,7 @@ impl Work for GlobalMetricWork { } #[derive(Debug)] -struct FeatureWork { - font_info: Arc, -} +struct FeatureWork(Arc); impl Work for FeatureWork { fn id(&self) -> WorkId { @@ -711,7 +557,7 @@ impl Work for FeatureWork { fn exec(&self, context: &Context) -> Result<(), Error> { trace!("Generate features"); - let font_info = self.font_info.as_ref(); + let font_info = self.0.as_ref(); let font = &font_info.font; context.features.set(to_ir_features(&font.features)?); @@ -732,9 +578,7 @@ const SIDE1_PREFIX: &str = "@MMK_L_"; const SIDE2_PREFIX: &str = "@MMK_R_"; #[derive(Debug)] -struct KerningGroupWork { - font_info: Arc, -} +struct KerningGroupWork(Arc); #[derive(Debug)] struct KerningInstanceWork { @@ -782,7 +626,7 @@ impl Work for KerningGroupWork { fn exec(&self, context: &Context) -> Result<(), Error> { trace!("Generate IR for kerning"); - 
let font_info = self.font_info.as_ref(); + let font_info = self.0.as_ref(); let font = &font_info.font; let mut groups = KerningGroups::default(); @@ -1037,7 +881,7 @@ impl Work for GlyphIrWork { #[cfg(test)] mod tests { use std::{ - collections::{HashMap, HashSet}, + collections::HashSet, path::{Path, PathBuf}, }; @@ -1055,10 +899,9 @@ mod tests { orchestration::{Context, Flags, WorkId}, paths::Paths, source::Source, - stateset::StateSet, }; use glyphs_reader::{glyphdata::Category, Font}; - use indexmap::IndexSet; + use ir::{test_helpers::Round2, Panose}; use write_fonts::types::{NameId, Tag}; @@ -1082,70 +925,47 @@ mod tests { testdata_dir().join("glyphs3") } - fn glyph_state_for_file(dir: &Path, filename: &str) -> HashMap { - let glyphs_file = dir.join(filename); + fn glyphs_of(glyphs_file: PathBuf) -> HashSet { let font = Font::load(&glyphs_file).unwrap(); - glyph_states(&font).unwrap() + font.glyph_order.iter().map(|s| s.as_str().into()).collect() } #[test] - fn find_glyphs() { + fn find_wght_var_glyphs() { assert_eq!( HashSet::from([ - "space", - "hyphen", - "exclam", - "bracketleft", - "bracketright", - "manual-component" + GlyphName::new("space"), + "hyphen".into(), + "exclam".into(), + "bracketleft".into(), + "bracketright".into(), + "manual-component".into() ]), - glyph_state_for_file(&glyphs3_dir(), "WghtVar.glyphs") - .keys() - .map(|k| k.as_str()) - .collect::>() - ); - assert_eq!( - HashSet::from(["space", "hyphen", "exclam"]), - glyph_state_for_file(&glyphs3_dir(), "WghtVar_HeavyHyphen.glyphs") - .keys() - .map(|k| k.as_str()) - .collect::>() + glyphs_of(glyphs3_dir().join("WghtVar.glyphs")) ); } #[test] - fn detect_changed_glyphs() { - let keys: HashSet = - HashSet::from(["space".into(), "hyphen".into(), "exclam".into()]); - - let g1 = glyph_state_for_file(&glyphs3_dir(), "WghtVar.glyphs"); - let g2 = glyph_state_for_file(&glyphs3_dir(), "WghtVar_HeavyHyphen.glyphs"); - - let changed = keys - .into_iter() - .filter(|key| g1.get(key).unwrap() != 
g2.get(key).unwrap()) - .collect(); - assert_eq!(HashSet::::from(["hyphen".into()]), changed); + fn find_wght_var_heavy_hyphen_glyphs() { + assert_eq!( + HashSet::from([GlyphName::new("space"), "hyphen".into(), "exclam".into()]), + glyphs_of(glyphs3_dir().join("WghtVar_HeavyHyphen.glyphs")) + ); } - fn context_for(glyphs_file: PathBuf) -> (impl Source, Context) { - let mut source = GlyphsIrSource::new(glyphs_file); - let input = source.inputs().unwrap(); + fn context_for(glyphs_file: &Path) -> (impl Source, Context) { + let source = GlyphsIrSource::new(glyphs_file).unwrap(); let mut flags = Flags::default(); flags.set(Flags::EMIT_IR, false); // we don't want to write anything down ( source, - Context::new_root( - flags, - Paths::new(Path::new("/nothing/should/write/here")), - input, - ), + Context::new_root(flags, Paths::new(Path::new("/nothing/should/write/here"))), ) } #[test] fn static_metadata_ir() { - let (source, context) = context_for(glyphs3_dir().join("WghtVar.glyphs")); + let (source, context) = context_for(&glyphs3_dir().join("WghtVar.glyphs")); let task_context = context.copy_for_work( Access::None, AccessBuilder::new() @@ -1154,7 +974,7 @@ mod tests { .build(), ); source - .create_static_metadata_work(&context.input) + .create_static_metadata_work() .unwrap() .exec(&task_context) .unwrap(); @@ -1186,7 +1006,7 @@ mod tests { #[test] fn static_metadata_ir_multi_axis() { // Caused index out of bounds due to transposed master and value indices - let (source, context) = context_for(glyphs2_dir().join("BadIndexing.glyphs")); + let (source, context) = context_for(&glyphs2_dir().join("BadIndexing.glyphs")); let task_context = context.copy_for_work( Access::None, AccessBuilder::new() @@ -1195,7 +1015,7 @@ mod tests { .build(), ); source - .create_static_metadata_work(&context.input) + .create_static_metadata_work() .unwrap() .exec(&task_context) .unwrap(); @@ -1203,7 +1023,7 @@ mod tests { #[test] fn loads_axis_mappings_from_glyphs2() { - let (source, context) 
= context_for(glyphs2_dir().join("OpszWghtVar_AxisMappings.glyphs")); + let (source, context) = context_for(&glyphs2_dir().join("OpszWghtVar_AxisMappings.glyphs")); let task_context = context.copy_for_work( Access::None, AccessBuilder::new() @@ -1212,7 +1032,7 @@ mod tests { .build(), ); source - .create_static_metadata_work(&context.input) + .create_static_metadata_work() .unwrap() .exec(&task_context) .unwrap(); @@ -1263,7 +1083,7 @@ mod tests { fn build_static_metadata(glyphs_file: PathBuf) -> (impl Source, Context) { let _ = env_logger::builder().is_test(true).try_init(); - let (source, context) = context_for(glyphs_file); + let (source, context) = context_for(&glyphs_file); let task_context = context.copy_for_work( Access::None, AccessBuilder::new() @@ -1272,7 +1092,7 @@ mod tests { .build(), ); source - .create_static_metadata_work(&context.input) + .create_static_metadata_work() .unwrap() .exec(&task_context) .unwrap(); @@ -1286,7 +1106,7 @@ mod tests { Access::Variant(WorkId::GlobalMetrics), ); source - .create_global_metric_work(&context.input) + .create_global_metric_work() .unwrap() .exec(&task_context) .unwrap(); @@ -1305,13 +1125,13 @@ mod tests { .glyph_order .set((*context.preliminary_glyph_order.get()).clone()); - let work = source.create_kerning_group_ir_work(&context.input).unwrap(); + let work = source.create_kerning_group_ir_work().unwrap(); work.exec(&context.copy_for_work(work.read_access(), work.write_access())) .unwrap(); for location in context.kerning_groups.get().locations.iter() { let work = source - .create_kerning_instance_ir_work(&context.input, location.clone()) + .create_kerning_instance_ir_work(location.clone()) .unwrap(); work.exec(&context.copy_for_work(work.read_access(), work.write_access())) .unwrap(); @@ -1320,26 +1140,20 @@ mod tests { (source, context) } - fn build_glyphs( - source: &impl Source, - context: &Context, - glyph_names: &[&GlyphName], - ) -> Result<(), Error> { - for glyph_name in glyph_names { - let glyph_name 
= *glyph_name; - let work_items = source - .create_glyph_ir_work(&IndexSet::from([glyph_name.clone()]), &context.input) - .unwrap(); - for work in work_items.iter() { - let task_context = context.copy_for_work( - Access::Variant(WorkId::StaticMetadata), - AccessBuilder::new() - .specific_instance(WorkId::Glyph(glyph_name.clone())) - .specific_instance(WorkId::Anchor(glyph_name.clone())) - .build(), - ); - work.exec(&task_context)?; - } + fn build_glyphs(source: &impl Source, context: &Context) -> Result<(), Error> { + let work_items = source.create_glyph_ir_work().unwrap(); + for work in work_items.iter() { + let WorkId::Glyph(glyph_name) = work.id() else { + panic!("{:?} should be glyph work!", work.id()); + }; + let task_context = context.copy_for_work( + Access::Variant(WorkId::StaticMetadata), + AccessBuilder::new() + .specific_instance(WorkId::Glyph(glyph_name.clone())) + .specific_instance(WorkId::Anchor(glyph_name.clone())) + .build(), + ); + work.exec(&task_context)?; } Ok(()) } @@ -1349,7 +1163,7 @@ mod tests { let glyph_name: GlyphName = "space".into(); let (source, context) = build_static_metadata(glyphs2_dir().join("OpszWghtVar_AxisMappings.glyphs")); - build_glyphs(&source, &context, &[&glyph_name]).unwrap(); // we dont' care about geometry + build_glyphs(&source, &context).unwrap(); // we dont' care about geometry let static_metadata = context.static_metadata.get(); let axes = static_metadata @@ -1388,7 +1202,7 @@ mod tests { let glyph_name: GlyphName = "space".into(); let (source, context) = build_static_metadata(glyphs2_dir().join("OpszWghtVar_AxisMappings.glyphs")); - build_glyphs(&source, &context, &[&glyph_name]).unwrap(); // we dont' care about geometry + build_glyphs(&source, &context).unwrap(); let mut expected_locations = HashSet::new(); for (opsz, wght) in &[ @@ -1441,7 +1255,7 @@ mod tests { #[test] fn captures_single_codepoints() { let (source, context) = build_static_metadata(glyphs2_dir().join("WghtVar.glyphs")); - build_glyphs(&source, 
&context, &[&"hyphen".into()]).unwrap(); + build_glyphs(&source, &context).unwrap(); let glyph = context.glyphs.get(&WorkId::Glyph("hyphen".into())); assert_eq!(HashSet::from([0x002d]), glyph.codepoints); } @@ -1450,7 +1264,7 @@ mod tests { fn captures_single_codepoints_unquoted_dec() { let (source, context) = build_static_metadata(glyphs3_dir().join("Unicode-UnquotedDec.glyphs")); - build_glyphs(&source, &context, &[&"name".into()]).unwrap(); + build_glyphs(&source, &context).unwrap(); let glyph = context.glyphs.get(&WorkId::Glyph("name".into())); assert_eq!(HashSet::from([182]), glyph.codepoints); } @@ -1459,7 +1273,7 @@ mod tests { fn captures_multiple_codepoints_unquoted_dec() { let (source, context) = build_static_metadata(glyphs3_dir().join("Unicode-UnquotedDecSequence.glyphs")); - build_glyphs(&source, &context, &[&"name".into()]).unwrap(); + build_glyphs(&source, &context).unwrap(); let glyph = context.glyphs.get(&WorkId::Glyph("name".into())); assert_eq!(HashSet::from([1619, 1764]), glyph.codepoints); } @@ -1673,7 +1487,7 @@ mod tests { let expected = "M302,584 Q328,584 346,602 Q364,620 364,645 Q364,670 346,687.5 Q328,705 302,705 Q276,705 257.5,687.5 Q239,670 239,645 Q239,620 257.5,602 Q276,584 302,584 Z"; for test_dir in &[glyphs2_dir(), glyphs3_dir()] { let (source, context) = build_static_metadata(test_dir.join("QCurve.glyphs")); - build_glyphs(&source, &context, &[&glyph_name.into()]).unwrap(); + build_glyphs(&source, &context).unwrap(); let glyph = context.get_glyph(glyph_name); let default_instance = glyph .sources() @@ -1706,7 +1520,7 @@ mod tests { let base_name = "A".into(); let mark_name = "macroncomb".into(); let (source, context) = build_static_metadata(glyphs3_dir().join("WghtVar_Anchors.glyphs")); - build_glyphs(&source, &context, &[&base_name, &mark_name]).unwrap(); + build_glyphs(&source, &context).unwrap(); let base = context.anchors.get(&WorkId::Anchor(base_name)); let mark = context.anchors.get(&WorkId::Anchor(mark_name)); @@ -1731,16 
+1545,7 @@ mod tests { fn reads_skip_export_glyphs() { let (source, context) = build_static_metadata(glyphs3_dir().join("WghtVar_NoExport.glyphs")); - build_glyphs( - &source, - &context, - &[ - &"manual-component".into(), - &"hyphen".into(), - &"space".into(), - ], - ) - .unwrap(); + build_glyphs(&source, &context).unwrap(); let is_export = |name: &str| { context .glyphs diff --git a/ufo2fontir/src/source.rs b/ufo2fontir/src/source.rs index 166de10af..55de154d4 100644 --- a/ufo2fontir/src/source.rs +++ b/ufo2fontir/src/source.rs @@ -19,10 +19,8 @@ use fontir::{ NamedInstance, Panose, PostscriptNames, StaticMetadata, DEFAULT_VENDOR_ID, }, orchestration::{Context, Flags, IrWork, WorkId}, - source::{Input, Source}, - stateset::{StateIdentifier, StateSet}, + source::Source, }; -use indexmap::IndexSet; use log::{debug, log_enabled, trace, warn, Level}; use norad::{ designspace::{self, DesignSpaceDocument}, @@ -39,48 +37,15 @@ use crate::toir::{master_locations, to_design_location, to_ir_axes, to_ir_glyph} const UFO_KERN1_PREFIX: &str = "public.kern1."; const UFO_KERN2_PREFIX: &str = "public.kern2."; +#[derive(Clone, Debug)] pub struct DesignSpaceIrSource { - designspace_or_ufo: PathBuf, - designspace: DesignSpaceDocument, - designspace_dir: PathBuf, - cache: Option, -} - -// A cache of locations, valid provided no global metadata changes -struct Cache { - static_metadata: StateSet, - locations: HashMap>, - designspace_file: PathBuf, + designspace_or_ufo: Arc, designspace: Arc, + designspace_dir: Arc, + glyphs: Arc>>>, fea_files: Arc>, } -impl Cache { - fn new( - static_metadata: StateSet, - locations: HashMap>, - designspace_file: PathBuf, - designspace: Arc, - feature_files: Vec, - ) -> Cache { - Cache { - static_metadata, - locations, - designspace_file, - designspace, - fea_files: Arc::from(feature_files), - } - } - - fn is_valid_for(&self, static_metadata: &StateSet) -> bool { - self.static_metadata == *static_metadata - } - - fn location_of(&self, glif_file: 
&Path) -> Option<&Vec> { - self.locations.get(glif_file) - } -} - fn glif_files( ufo_dir: &Path, layer_cache: &mut HashMap>, @@ -145,164 +110,85 @@ pub(crate) fn layer_dir<'a>( } impl DesignSpaceIrSource { - pub fn new(designspace_or_ufo: PathBuf) -> Result { - Self::load(designspace_or_ufo.clone()) - .map_err(|kind| BadSource::new(designspace_or_ufo, kind).into()) - } - - fn load(designspace_or_ufo: PathBuf) -> Result { - let Some(designspace_dir) = designspace_or_ufo.parent().map(|d| d.to_path_buf()) else { - return Err(BadSourceKind::ExpectedParent); - }; - let Some(ext) = designspace_or_ufo - .extension() - .map(|s| s.to_ascii_lowercase()) - else { - return Err(BadSourceKind::UnrecognizedExtension); - }; - let mut designspace = match ext.to_str() { - Some("designspace") => { - DesignSpaceDocument::load(&designspace_or_ufo).map_err(|e| match e { - norad::error::DesignSpaceLoadError::Io(e) => BadSourceKind::Io(e), - other => BadSourceKind::Custom(other.to_string()), - })? - } - Some("ufo") => { - let Some(filename) = designspace_or_ufo.file_name().and_then(|s| s.to_str()) else { - return Err(BadSourceKind::ExpectedDirectory); - }; - DesignSpaceDocument { - format: 4.1, - sources: vec![norad::designspace::Source { - filename: filename.to_owned(), - ..Default::default() - }], - ..Default::default() - } - } - _ => return Err(BadSourceKind::UnrecognizedExtension), - }; - - for (i, source) in designspace.sources.iter_mut().enumerate() { - if source.name.is_none() { - source.name = Some(format!("unnamed_source_{i}")); - } - } - - for (i, instance) in designspace.instances.iter_mut().enumerate() { - if instance.name.is_none() { - instance.name = Some(format!("unnamed_instance_{i}")) - } - } - - Ok(DesignSpaceIrSource { - designspace_or_ufo, - designspace_dir, - cache: None, - designspace, - }) - } - - // When things like upem may have changed forget incremental and rebuild the whole thing - fn static_metadata_state(&self) -> Result { - let mut font_info = 
StateSet::new(); - font_info.track_file(&self.designspace_or_ufo)?; - let (default_master_idx, _) = default_master(&self.designspace) - .ok_or_else(|| Error::NoDefaultMaster(self.designspace_or_ufo.clone()))?; - - for (idx, source) in self.designspace.sources.iter().enumerate() { - let ufo_dir = self.designspace_dir.join(&source.filename); - for filename in ["fontinfo.plist", "lib.plist"] { - // Only track lib.plist for the default master - if filename == "lib.plist" && idx != default_master_idx { - continue; - } - - let file = ufo_dir.join(filename); - if !file.is_file() { - continue; - } - font_info.track_file(&file)?; - } - } - Ok(font_info) - } - - fn check_static_metadata(&self, global_metadata: &StateSet) -> Result<(), Error> { - // Do we have the location of glifs written down? - // TODO: consider just recomputing here instead of failing - if !self - .cache - .as_ref() - .map(|gl| gl.is_valid_for(global_metadata)) - .unwrap_or(false) - { - return Err(Error::InvalidGlobalMetadata); - } - Ok(()) - } - fn create_work_for_one_glyph( &self, glyph_name: &GlyphName, export: bool, - input: &Input, ) -> Result { // A single glif could be used by many source blocks that use the same layer // *gasp* // So resolve each file to 1..N locations in designspace + let Some(glif_files) = self.glyphs.get(glyph_name) else { + return Err(Error::NoLocationsForGlyph(glyph_name.clone())); + }; - let stateset = input - .glyphs - .get(glyph_name) - .ok_or_else(|| Error::NoStateForGlyph(glyph_name.clone()))?; - let mut glif_files = HashMap::new(); - let cache = self.cache.as_ref().unwrap(); - for state_key in stateset.keys() { - let StateIdentifier::File(glif_file) = state_key else { - return Err(Error::UnexpectedState); - }; - let locations = cache - .location_of(glif_file) - .ok_or_else(|| Error::NoLocationsForGlyph(glyph_name.clone()))?; - glif_files.insert(glif_file.to_path_buf(), locations.clone()); - } Ok(GlyphIrWork { glyph_name: glyph_name.clone(), export, - glif_files, + 
glif_files: glif_files.clone(), }) } } -impl Source for DesignSpaceIrSource { - fn inputs(&mut self) -> Result { - let static_metadata = self.static_metadata_state()?; +fn load_designspace( + designspace_or_ufo: &Path, +) -> Result<(PathBuf, DesignSpaceDocument), BadSourceKind> { + let Some(designspace_dir) = designspace_or_ufo.parent().map(|d| d.to_path_buf()) else { + return Err(BadSourceKind::ExpectedParent); + }; - // glif filenames are not reversible so we need to read contents.plist to figure out groups - // See https://github.com/unified-font-object/ufo-spec/issues/164. - let mut glyphs: HashMap = HashMap::new(); + let Some(ext) = designspace_or_ufo + .extension() + .map(|s| s.to_ascii_lowercase()) + else { + return Err(BadSourceKind::UnrecognizedExtension); + }; + let designspace = match ext.to_str() { + Some("designspace") => { + DesignSpaceDocument::load(designspace_or_ufo).map_err(|e| match e { + norad::error::DesignSpaceLoadError::Io(e) => BadSourceKind::Io(e), + other => BadSourceKind::Custom(other.to_string()), + })? 
+ } + Some("ufo") => { + let Some(filename) = designspace_or_ufo.file_name().and_then(|s| s.to_str()) else { + return Err(BadSourceKind::ExpectedDirectory); + }; + DesignSpaceDocument { + format: 4.1, + sources: vec![norad::designspace::Source { + filename: filename.to_owned(), + ..Default::default() + }], + ..Default::default() + } + } + _ => return Err(BadSourceKind::UnrecognizedExtension), + }; + debug!("Loaded {ext:?} from {designspace_or_ufo:?}"); + Ok((designspace_dir, designspace)) +} - // UFO filename => map of layer - let mut layer_cache = HashMap::new(); - let mut glif_locations: HashMap> = HashMap::new(); - let mut glyph_names: HashSet = HashSet::new(); +impl Source for DesignSpaceIrSource { + fn new(designspace_or_ufo_file: &Path) -> Result { + let (designspace_dir, mut designspace) = load_designspace(designspace_or_ufo_file) + .map_err(|kind| { + Error::BadSource(BadSource::new(designspace_or_ufo_file.to_path_buf(), kind)) + })?; - let Some((default_master_idx, default_master)) = default_master(&self.designspace) else { - return Err(Error::NoDefaultMaster(self.designspace_or_ufo.clone())); - }; - let mut sources_default_first = vec![default_master]; - sources_default_first.extend( - self.designspace - .sources - .iter() - .enumerate() - .filter(|(idx, _)| *idx != default_master_idx) - .map(|(_, s)| s), - ); + for (i, source) in designspace.sources.iter_mut().enumerate() { + if source.name.is_none() { + source.name = Some(format!("unnamed_source_{i}")); + } + } - let tags_by_name = self - .designspace + for (i, instance) in designspace.instances.iter_mut().enumerate() { + if instance.name.is_none() { + instance.name = Some(format!("unnamed_instance_{i}")) + } + } + + let axis_tags_by_name = designspace .axes .iter() .map(|a| { @@ -314,158 +200,119 @@ impl Source for DesignSpaceIrSource { }) }) .collect::, _>>()?; - for (idx, source) in sources_default_first.iter().enumerate() { + + // glif filenames are not reversible so we need to read contents.plist 
to figure out groups + // See https://github.com/unified-font-object/ufo-spec/issues/164. + // UFO filename => map of layer + let mut layer_cache = HashMap::new(); + + let mut glyphs = HashMap::>>::new(); + + let Some((default_master_idx, _)) = default_master(&designspace) else { + return Err(Error::NoDefaultMaster( + designspace_or_ufo_file.to_path_buf(), + )); + }; + let mut sources_indices_default_first = (0..designspace.sources.len()).collect::>(); + sources_indices_default_first.swap(0, default_master_idx); + + for source_idx in sources_indices_default_first { + let source = &designspace.sources[source_idx]; // Track files within each UFO // The UFO dir *must* exist since we were able to find fontinfo in it earlier - let ufo_dir = self.designspace_dir.join(&source.filename); - - let location = to_design_location(&tags_by_name, &source.location); + let ufo_dir = designspace_dir.join(&source.filename); + let location = to_design_location(&axis_tags_by_name, &source.location); for (glyph_name, glif_file) in glif_files(&ufo_dir, &mut layer_cache, source)? 
{ if !glif_file.exists() { return Err(BadSource::new(glif_file, BadSourceKind::ExpectedFile).into()); } - if idx > 0 && !glyph_names.contains(&glyph_name) { + // Default master went first, so if we've never seen this glyph before that's weird + if source_idx != default_master_idx && !glyphs.contains_key(&glyph_name) { warn!("The glyph name '{:?}' exists in {} but not in the default master and will be ignored", glyph_name, source.filename); continue; } - glyph_names.insert(glyph_name.clone()); - let glif_file = glif_file.clone(); + glyphs .entry(glyph_name) .or_default() - .track_file(&glif_file)?; - let glif_locations = glif_locations.entry(glif_file).or_default(); - glif_locations.push(location.clone()); + .entry(glif_file) + .or_default() + .push(location.clone()); } } - if glyph_names.is_empty() { + if glyphs.is_empty() { warn!("No glyphs identified"); } else { - debug!("{} glyphs identified", glyph_names.len()); - } - - let ds_dir = self.designspace_or_ufo.parent().unwrap(); - - // We haven't parsed fea files yet, and we don't want to so make the educated guess that - // any feature file in designspace directory is an include instead of only looking at the - // ufo dir / features.fea files. - let mut features = StateSet::new(); - let mut paths = vec![ds_dir.to_path_buf()]; - while let Some(path) = paths.pop() { - if path.is_dir() { - for entry in path.read_dir().map_err(|e| BadSource::new(&path, e))? 
{ - let entry = entry.map_err(|e| BadSource::new(&path, e))?; - paths.push(entry.path()); - } - } - if path.is_file() - && path - .file_name() - .and_then(|n| n.to_str()) - .map(|n| n.ends_with(".fea")) - .unwrap_or_default() - { - trace!("Tracking {path:?} as a feature file"); - features.track_file(&path)?; - } + debug!("{} glyphs identified", glyphs.len()); } // For compilation purposes we start from ufo dir / features.fea - let fea_files: Vec<_> = self - .designspace + let fea_files: Vec<_> = designspace .sources .iter() .filter_map(|s| { - let fea_file = ds_dir.join(&s.filename).join("features.fea"); + let fea_file = designspace_dir.join(&s.filename).join("features.fea"); fea_file.is_file().then_some(fea_file) }) .collect(); - self.cache = Some(Cache::new( - static_metadata.clone(), - glif_locations, - self.designspace_or_ufo.clone(), - Arc::from(self.designspace.clone()), - fea_files, - )); - - // fontinfo.plist spans static metadata and global metrics. - // Just use the same change detection for both. 
- Ok(Input { - static_metadata: static_metadata.clone(), - global_metrics: static_metadata, - glyphs, - features, + Ok(DesignSpaceIrSource { + designspace_or_ufo: Arc::new(designspace_or_ufo_file.to_path_buf()), + designspace: Arc::new(designspace), + designspace_dir: Arc::new(designspace_dir), + glyphs: Arc::new(glyphs), + fea_files: Arc::new(fea_files), }) } - fn create_static_metadata_work(&self, input: &Input) -> Result, Error> { - self.check_static_metadata(&input.static_metadata)?; - let cache = self.cache.as_ref().unwrap(); - - let glyph_names = Arc::new(input.glyphs.keys().cloned().collect()); - + fn create_static_metadata_work(&self) -> Result, Error> { Ok(Box::new(StaticMetadataWork { - designspace_file: cache.designspace_file.clone(), - designspace: cache.designspace.clone(), - glyph_names, + designspace_or_ufo: self.designspace_or_ufo.clone(), + designspace_dir: self.designspace_dir.clone(), + designspace: self.designspace.clone(), + glyph_names: Arc::new(self.glyphs.keys().cloned().collect()), })) } - fn create_global_metric_work(&self, input: &Input) -> Result, Error> { - self.check_static_metadata(&input.static_metadata)?; - let cache = self.cache.as_ref().unwrap(); - + fn create_global_metric_work(&self) -> Result, Error> { Ok(Box::new(GlobalMetricsWork { - designspace_file: cache.designspace_file.clone(), - designspace: cache.designspace.clone(), + designspace_or_ufo: self.designspace_or_ufo.clone(), + designspace_dir: self.designspace_dir.clone(), + designspace: self.designspace.clone(), })) } - fn create_feature_ir_work(&self, input: &Input) -> Result, Error> { - self.check_static_metadata(&input.static_metadata)?; - let cache = self.cache.as_ref().unwrap(); - + fn create_feature_ir_work(&self) -> Result, Error> { + debug!("Create feature IR work for {:#?}", self.designspace_or_ufo); Ok(Box::new(FeatureWork { - designspace_file: cache.designspace_file.clone(), - fea_files: cache.fea_files.clone(), 
})) } - fn create_kerning_group_ir_work(&self, input: &Input) -> Result, Error> { - self.check_static_metadata(&input.static_metadata)?; - let cache = self.cache.as_ref().unwrap(); - + fn create_kerning_group_ir_work(&self) -> Result, Error> { Ok(Box::new(KerningGroupWork { - designspace_file: cache.designspace_file.clone(), - designspace: cache.designspace.clone(), + designspace_or_ufo: self.designspace_or_ufo.clone(), + designspace_dir: self.designspace_dir.clone(), + designspace: self.designspace.clone(), })) } fn create_kerning_instance_ir_work( &self, - input: &Input, at: NormalizedLocation, ) -> Result, Error> { - self.check_static_metadata(&input.static_metadata)?; - let cache = self.cache.as_ref().unwrap(); - Ok(Box::new(KerningInstanceWork { - designspace_file: cache.designspace_file.clone(), - designspace: cache.designspace.clone(), + designspace_or_ufo: self.designspace_or_ufo.clone(), + designspace_dir: self.designspace_dir.clone(), + designspace: self.designspace.clone(), location: at, })) } - fn create_glyph_ir_work( - &self, - glyph_names: &IndexSet, - input: &Input, - ) -> Result>, Error> { - self.check_static_metadata(&input.static_metadata)?; - + fn create_glyph_ir_work(&self) -> Result>, Error> { let no_export: HashSet = if let Some(plist::Value::Array(no_export)) = self.designspace.lib.get("public.skipExportGlyphs") { @@ -483,11 +330,10 @@ impl Source for DesignSpaceIrSource { // So resolve each file to 1..N locations in designspace let mut work: Vec> = Vec::new(); - for glyph_name in glyph_names { + for glyph_name in self.glyphs.keys() { work.push(Box::new(self.create_work_for_one_glyph( glyph_name, !no_export.contains(glyph_name), - input, )?)); } @@ -497,32 +343,36 @@ impl Source for DesignSpaceIrSource { #[derive(Debug)] struct StaticMetadataWork { - designspace_file: PathBuf, + designspace_or_ufo: Arc, + designspace_dir: Arc, designspace: Arc, glyph_names: Arc>, } #[derive(Debug)] struct GlobalMetricsWork { - designspace_file: PathBuf, + 
designspace_or_ufo: Arc, + designspace_dir: Arc, designspace: Arc, } #[derive(Debug)] struct FeatureWork { - designspace_file: PathBuf, + designspace_or_ufo: Arc, fea_files: Arc>, } #[derive(Debug)] struct KerningGroupWork { - designspace_file: PathBuf, + designspace_or_ufo: Arc, + designspace_dir: Arc, designspace: Arc, } #[derive(Debug)] struct KerningInstanceWork { - designspace_file: PathBuf, + designspace_or_ufo: Arc, + designspace_dir: Arc, designspace: Arc, location: NormalizedLocation, } @@ -840,10 +690,12 @@ impl Work for StaticMetadataWork { } fn exec(&self, context: &Context) -> Result<(), Error> { - debug!("Static metadata for {:#?}", self.designspace_file); - let designspace_dir = self.designspace_file.parent().unwrap(); + debug!("Static metadata for {:#?}", self.designspace_or_ufo); + let designspace_dir = self.designspace_dir.as_ref(); let Some((_, default_master)) = default_master(&self.designspace) else { - return Err(Error::NoDefaultMaster(self.designspace_file.clone())); + return Err(Error::NoDefaultMaster( + self.designspace_or_ufo.to_path_buf(), + )); }; let font_infos = font_infos(designspace_dir, &self.designspace)?; let font_info_at_default = font_infos.get(&default_master.filename).ok_or_else(|| { @@ -1114,10 +966,10 @@ impl Work for GlobalMetricsWork { } fn exec(&self, context: &Context) -> Result<(), Error> { - debug!("Global metrics for {:#?}", self.designspace_file); + debug!("Global metrics for {:#?}", self.designspace_or_ufo); let static_metadata = context.static_metadata.get(); - let designspace_dir = self.designspace_file.parent().unwrap(); + let designspace_dir = self.designspace_dir.as_ref(); let font_infos = font_infos(designspace_dir, &self.designspace)?; let master_locations = master_locations(&static_metadata.all_source_axes, &self.designspace.sources); @@ -1283,7 +1135,7 @@ impl Work for FeatureWork { } fn exec(&self, context: &Context) -> Result<(), Error> { - debug!("Features for {:#?}", self.designspace_file); + 
debug!("Features for {:#?}", self.designspace_or_ufo); let fea_files = self.fea_files.as_ref(); for fea_file in fea_files.iter().skip(1) { @@ -1397,15 +1249,17 @@ impl Work for KerningGroupWork { } fn exec(&self, context: &Context) -> Result<(), Error> { - debug!("Kerning groups for {:#?}", self.designspace_file); + debug!("Kerning groups for {:#?}", self.designspace_or_ufo); - let designspace_dir = self.designspace_file.parent().unwrap(); + let designspace_dir = self.designspace_dir.as_ref(); let glyph_order = context.glyph_order.get(); let static_metadata = context.static_metadata.get(); let master_locations = master_locations(&static_metadata.all_source_axes, &self.designspace.sources); let Some((default_master_idx, default_master)) = default_master(&self.designspace) else { - return Err(Error::NoDefaultMaster(self.designspace_file.clone())); + return Err(Error::NoDefaultMaster( + self.designspace_or_ufo.to_path_buf(), + )); }; // Step 1: find the groups @@ -1495,10 +1349,10 @@ impl Work for KerningInstanceWork { fn exec(&self, context: &Context) -> Result<(), Error> { debug!( "Kerning for {:#?} at {:?}", - self.designspace_file, self.location + self.designspace_or_ufo, self.location ); - let designspace_dir = self.designspace_file.parent().unwrap(); + let designspace_dir = self.designspace_dir.as_ref(); let static_metadata = context.static_metadata.get(); let glyph_order = context.glyph_order.get(); let groups = context.kerning_groups.get(); @@ -1656,7 +1510,7 @@ mod tests { }, orchestration::{Context, Flags, WorkId}, paths::Paths, - source::{Input, Source}, + source::Source, }; use norad::designspace; @@ -1714,10 +1568,8 @@ mod tests { ); } - fn load_designspace(name: &str) -> (DesignSpaceIrSource, Input) { - let mut source = DesignSpaceIrSource::new(testdata_dir().join(name)).unwrap(); - let input = source.inputs().unwrap(); - (source, input) + fn load_designspace(name: &str) -> DesignSpaceIrSource { + 
DesignSpaceIrSource::new(&testdata_dir().join(name)).unwrap() } fn default_test_flags() -> Flags { @@ -1726,12 +1578,8 @@ mod tests { fn build_static_metadata(name: &str, flags: Flags) -> (impl Source, Context) { let _ = env_logger::builder().is_test(true).try_init(); - let (source, input) = load_designspace(name); - let context = Context::new_root( - flags, - Paths::new(Path::new("/nothing/should/write/here")), - input, - ); + let source = load_designspace(name); + let context = Context::new_root(flags, Paths::new(Path::new("/nothing/should/write/here"))); let task_context = context.copy_for_work( Access::None, AccessBuilder::new() @@ -1740,7 +1588,7 @@ mod tests { .build(), ); source - .create_static_metadata_work(&context.input) + .create_static_metadata_work() .unwrap() .exec(&task_context) .unwrap(); @@ -1764,7 +1612,7 @@ mod tests { Access::Variant(WorkId::GlobalMetrics), ); source - .create_global_metric_work(&context.input) + .create_global_metric_work() .unwrap() .exec(&task_context) .unwrap(); @@ -1783,13 +1631,13 @@ mod tests { .glyph_order .set((*context.preliminary_glyph_order.get()).clone()); - let work = source.create_kerning_group_ir_work(&context.input).unwrap(); + let work = source.create_kerning_group_ir_work().unwrap(); work.exec(&context.copy_for_work(work.read_access(), work.write_access())) .unwrap(); for location in context.kerning_groups.get().locations.iter() { let work = source - .create_kerning_instance_ir_work(&context.input, location.clone()) + .create_kerning_instance_ir_work(location.clone()) .unwrap(); work.exec(&context.copy_for_work(work.read_access(), work.write_access())) .unwrap(); @@ -1802,10 +1650,9 @@ mod tests { let (source, context) = build_static_metadata(name, default_test_flags()); build_glyph_order(&context); - let glyph_order = context.glyph_order.get().iter().cloned().collect(); let task_context = context.copy_for_work(Access::All, Access::All); source - .create_glyph_ir_work(&glyph_order, &context.input) + 
.create_glyph_ir_work() .unwrap() .into_iter() .for_each(|w| w.exec(&task_context).unwrap()); @@ -1813,7 +1660,7 @@ mod tests { (source, context) } - fn load_wght_var() -> (DesignSpaceIrSource, Input) { + fn load_wght_var() -> DesignSpaceIrSource { load_designspace("wght_var.designspace") } @@ -1833,10 +1680,10 @@ mod tests { #[test] pub fn create_work() { - let (ir_source, input) = load_wght_var(); + let ir_source = load_wght_var(); let work = ir_source - .create_work_for_one_glyph(&"bar".into(), true, &input) + .create_work_for_one_glyph(&"bar".into(), true) .unwrap(); let mut expected_glif_files = HashMap::new(); @@ -1863,10 +1710,10 @@ mod tests { #[test] pub fn create_sparse_work() { - let (ir_source, input) = load_wght_var(); + let ir_source = load_wght_var(); let work = ir_source - .create_work_for_one_glyph(&"plus".into(), true, &input) + .create_work_for_one_glyph(&"plus".into(), true) .unwrap(); // Note there is NOT a glyphs.{600} version of plus @@ -1888,14 +1735,14 @@ mod tests { #[test] pub fn only_glyphs_present_in_default() { - let (_, inputs) = load_wght_var(); + let source = load_wght_var(); // bonus_bar is not present in the default master; should discard - assert!(!inputs.glyphs.contains_key("bonus_bar")); + assert!(!source.glyphs.contains_key("bonus_bar")); } #[test] pub fn find_default_master() { - let (source, _) = load_wght_var(); + let source = load_wght_var(); let tag = Tag::new(b"wght"); let mut loc = DesignLocation::new(); loc.insert(tag, DesignCoord::new(400.0)); @@ -1913,7 +1760,7 @@ mod tests { pub fn builds_glyph_order_for_wght_var() { // Only WghtVar-Regular.ufo has a lib.plist, and it only lists a subset of glyphs // Should still work. 
- let (source, _) = load_wght_var(); + let source = load_wght_var(); let (_, default_master) = default_master(&source.designspace).unwrap(); let lib_plist = load_plist( &source.designspace_dir.join(&default_master.filename), @@ -1955,14 +1802,14 @@ mod tests { #[test] pub fn fetches_upem() { - let (source, _) = load_wght_var(); + let source = load_wght_var(); let font_infos = font_infos(&source.designspace_dir, &source.designspace).unwrap(); assert_eq!(1000, units_per_em(font_infos.values()).unwrap()); } #[test] pub fn ot_rounds_upem() { - let (source, _) = load_designspace("float.designspace"); + let source = load_designspace("float.designspace"); let font_infos = font_infos(&source.designspace_dir, &source.designspace).unwrap(); assert_eq!( 256, // 255.5 rounded toward +infinity @@ -1972,7 +1819,7 @@ mod tests { #[test] pub fn default_names_for_minimal() { - let (source, _) = load_designspace("float.designspace"); + let source = load_designspace("float.designspace"); let font_info = font_infos(&source.designspace_dir, &source.designspace) .unwrap() .get(&String::from("Float-Regular.ufo"))