diff --git a/.commitlintrc.no-scope.yml b/.commitlintrc.no-scope.yml
new file mode 100644
index 000000000..e97300cf7
--- /dev/null
+++ b/.commitlintrc.no-scope.yml
@@ -0,0 +1,36 @@
+rules:
+  # Body may be empty
+  body-empty:
+    level: ignore
+
+  # Description must not be empty
+  description-empty:
+    level: error
+
+  # Description must start with a capital letter and must not end with a period or space
+  description-format:
+    level: error
+    format: ^[A-Z0-9].*[^. ]$
+
+  # Description should be <70 chars
+  description-max-length:
+    level: warning
+    length: 70
+
+  # Scope may be empty
+  scope-empty:
+    level: ignore
+
+  # Subject line should exist
+  subject-empty:
+    level: error
+
+  # Type must be one of these options
+  type:
+    level: error
+    options:
+      - fix
+      - feat
+      - chore
+      - update
+      - doc
diff --git a/.commitlintrc.yml b/.commitlintrc.yml
new file mode 100644
index 000000000..a0b1e4587
--- /dev/null
+++ b/.commitlintrc.yml
@@ -0,0 +1,46 @@
+rules:
+  # Body may be empty
+  body-empty:
+    level: ignore
+
+  # Description must not be empty
+  description-empty:
+    level: error
+
+  # Description must start with a capital letter and must not end with a period or space
+  description-format:
+    level: error
+    format: ^[A-Z0-9].*[^. ]$
+
+  # Description should be <70 chars
+  description-max-length:
+    level: warning
+    length: 70
+
+  # Scope must be one of the following
+  scope:
+    level: error
+    options:
+      - export_schema
+      - make_test_images
+      - sdk
+
+  # Scope may be empty
+  # (NOTE: Disabled for now while we work around
+  # https://github.com/KeisukeYamashita/commitlint-rs/issues/355.)
+  # scope-empty:
+  #   level: ignore
+
+  # Subject line should exist
+  subject-empty:
+    level: error
+
+  # Type must be one of these options
+  type:
+    level: error
+    options:
+      - fix
+      - feat
+      - chore
+      - update
+      - doc
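The `description-format` rule in both configs is a single regular expression. A quick way to sanity-check descriptions against it is to run a few candidates through the same pattern; a minimal sketch (assuming the `regex` crate, which is not part of this PR):

```rust
use regex::Regex;

fn main() {
    // Same pattern as `description-format` in the commitlint configs above:
    // must start with a capital letter or digit, must not end with '.' or ' '.
    let re = Regex::new(r"^[A-Z0-9].*[^. ]$").unwrap();

    for desc in [
        "Add GIF support",      // passes
        "add GIF support",      // fails: starts lower-case
        "Add GIF support.",     // fails: ends with a period
        "0.36.3 release notes", // passes: may start with a digit
    ] {
        println!("{desc:?} -> {}", re.is_match(desc));
    }
}
```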
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index a239e10e4..b50356831 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -7,13 +7,26 @@ updates:
     directory: "sdk"
     schedule:
       interval: "daily"
+    commit-message:
+      prefix: "update"
 
   - package-ecosystem: "cargo"
     directory: "export_schema"
     schedule:
       interval: "daily"
+    commit-message:
+      prefix: "update"
 
   - package-ecosystem: "cargo"
     directory: "make_test_images"
     schedule:
       interval: "daily"
+    commit-message:
+      prefix: "update"
+
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+    commit-message:
+      prefix: "chore"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index cabdddcd9..b8b7484fc 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,14 +1,21 @@
 name: CI
 
-on: 
+on:
+  workflow_dispatch:
   pull_request:
+    types:
+      - opened
+      - reopened
+      - synchronize
   push:
-    branches: main
+    branches:
+      - main
+      - nightly
   schedule:
     - cron: "0 18 * * 1,4,6" # 1800 UTC every Monday, Thursday, Saturday
 
 jobs:
-  tests-cargo:
+  tests:
     name: Unit tests
     runs-on: ${{ matrix.os }}
@@ -16,11 +23,11 @@
     strategy:
       fail-fast: false
       matrix:
         os: [windows-latest, macos-latest, ubuntu-latest]
-        rust_version: [stable, 1.74.0]
+        rust_version: [stable, 1.76.0]
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
       - name: Install Rust toolchain
         uses: dtolnay/rust-toolchain@master
@@ -41,11 +48,11 @@ jobs:
       fail-fast: false
       matrix:
         target: [aarch64-unknown-linux-gnu]
-        rust_version: [stable, 1.74.0]
+        rust_version: [stable, 1.76.0]
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
       - name: Install Rust toolchain
         uses: dtolnay/rust-toolchain@master
@@ -72,7 +79,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
       - name: Install Rust toolchain
         uses: dtolnay/rust-toolchain@stable
@@ -95,12 +102,10 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
       - name: Install Rust toolchain
         uses: dtolnay/rust-toolchain@nightly
-        with:
-          components: llvm-tools-preview
 
       - name: Cache Rust dependencies
         uses: Swatinem/rust-cache@v2
@@ -120,7 +125,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
       - name: Install Rust toolchain
         uses: dtolnay/rust-toolchain@master
@@ -139,31 +144,25 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
       - name: Install Rust toolchain
         uses: dtolnay/rust-toolchain@stable
-
-      - name: Install clippy
-        run: rustup component add clippy
+        with:
+          components: clippy
 
       - name: Cache Rust dependencies
         uses: Swatinem/rust-cache@v2
 
-      - name: Cargo clippy
-        uses: actions-rs/clippy-check@v1
-        with:
-          token: ${{ secrets.GITHUB_TOKEN }}
-          args: --all-features --all-targets -- -D warnings
-        env:
-          RUST_BACKTRACE: "1"
+      - name: Run Clippy
+        run: cargo clippy --all-features --all-targets -- -Dwarnings
 
   cargo_fmt:
     name: Enforce Rust code format
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
       - name: Install stable toolchain
         uses: dtolnay/rust-toolchain@nightly
@@ -178,12 +177,12 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
       - name: Install nightly Rust toolchain
         # Nightly is used here because the docs.rs build
         # uses nightly and we use doc_cfg features that are
-        # not in stable Rust as of this writing (Rust 1.62).
+        # not in stable Rust as of this writing (Rust 1.76).
         uses: dtolnay/rust-toolchain@nightly
 
       - name: Run cargo docs
@@ -212,10 +211,10 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
       - name: Audit crate dependencies
-        uses: EmbarkStudios/cargo-deny-action@v1
+        uses: EmbarkStudios/cargo-deny-action@v2
         with:
           command: check ${{ matrix.checks }}
@@ -224,7 +223,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
       - name: Install Rust toolchain
         uses: dtolnay/rust-toolchain@nightly
@@ -234,53 +233,3 @@ jobs:
         with:
           version: latest
           args: --all-targets --all-features
-
-  version_bump:
-    name: Ensure (MINOR) tag is used when making an API breaking change
-    # Change all of these steps to (MAJOR) after 1.0 release
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Install Rust toolchain
-        uses: dtolnay/rust-toolchain@stable
-
-      - name: Get latest existing tag
-        uses: WyriHaximus/github-action-get-previous-tag@v1
-        id: get-latest-tag
-
-      - name: Set new version
-        uses: paulhatch/semantic-version@v5.2.1
-        id: set-version
-        with:
-          tag_prefix: "v"
-          version_format: "${major}.${minor}.${patch}"
-          major_pattern: "(MAJOR)"
-          minor_pattern: "(MINOR)"
-
-      - name: Generate changelog since last tag
-        run: |
-          {
-            echo 'changelog<> "$GITHUB_OUTPUT"
-
-      - name: "Bump crate version (NOTE: Not pushed back to repo!)"
-        continue-on-error: true # If no change to crate version, ignore git commit error
-        run: |
-          sed -i "s/^version = \"[^\"]*\"$/version = \"$VERSION\"/;" sdk/Cargo.toml
-          git config user.email "nobody@example.com"
-          git config --global user.name "PR validation bot"
-          git add .
-          git commit -m "DO NOT PUSH BACK TO PR: Bump crate version"
-        env:
-          VERSION: ${{ steps.set-version.outputs.version }}
-
-      - name: If this step fails, change title of the PR to include (MINOR) tag
-        uses: obi1kenobi/cargo-semver-checks-action@v2
-        with:
-          package: c2pa
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index 7d907d2e7..6d36a9d3f 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -1,121 +1,164 @@
-name: Nightly build
+name: Nightly branch
 
 on:
   workflow_dispatch:
+  pull_request:
+    types:
+      - opened
+      - synchronize
+      - reopened
+      - edited
+  push:
+    branches:
+      - main
   schedule:
     - cron: "0 5 * * *" # 0500 UTC every day
 
 jobs:
-  # Create snapshot of main branch for nightly build
+  # Create snapshot of latest main or release branch for nightly build
   nightly-snapshot:
-    name: Create snapshot
+    name: Update nightly branch
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        with:
          fetch-depth: 0
-          token: ${{ secrets.GH_ADMIN_COMMIT_TOKEN }}
+          token: ${{ secrets.RELEASE_PLZ_TOKEN }}
 
-      - name: Set new proposed version
-        uses: paulhatch/semantic-version@v5.2.1
-        id: set-version
-        with:
-          tag_prefix: "v"
-          version_format: "${major}.${minor}.${patch}"
-          major_pattern: "(MAJOR)"
-          minor_pattern: "(MINOR)"
-
-      - name: Add -nightly+(date)-(commit ID) prefix to version
-        id: set-nightly-version
+      - name: Choose base commit for nightly snapshot
+        id: choose-base
+        env:
+          GH_EVENT_NAME: ${{ github.event_name }}
         run: |
-          echo version=${{ steps.set-version.outputs.version }}-nightly+`date +%F`-`git rev-parse --short HEAD` >> "$GITHUB_OUTPUT"
+          echo "Event triggered by $GH_EVENT_NAME"
+          echo
 
-      - name: Log new version & changelog
-        run: |
-          echo "Proposed new version: $VERSION"
-          echo "Nightly version: $NIGHTLY_VERSION"
-        env:
-          VERSION: ${{ steps.set-version.outputs.version }}
-          NIGHTLY_VERSION: ${{ steps.set-nightly-version.outputs.version }}
+          echo "Listing branches on remote"
+          echo
+
+          git --no-pager branch --remote | sed 's|origin/||'
+
+          echo
+
+          git checkout -b nightly --track origin/nightly
+          OLD_BASE_COMMIT=$(git log nightly --skip=1 -n 1 --format=%H)
+
+          echo "Nightly commit ID is $OLD_BASE_COMMIT"
+
+          LATEST_RELEASE_PLZ=$(git --no-pager branch --format="%(refname:short)" --remote |
+            sed 's|origin/||' |
+            grep -E '^release-plz-20' |
+            sort -r |
+            head -n 1)
+
+          if [ -z "$LATEST_RELEASE_PLZ" ]; then
+
+            echo "No release-plz branch; using main"
+
+            if git show-ref --verify --quiet refs/heads/main; then
+              echo "Branch 'main' already exists locally"
+              git checkout main;
+            else
+              git checkout -b main --track origin/main;
+            fi
+
+            BASE_BRANCH=main;
+
+          else
+
+            echo "Found RP branch $LATEST_RELEASE_PLZ; comparing against main"
+
+            git checkout -b "$LATEST_RELEASE_PLZ" --track "origin/$LATEST_RELEASE_PLZ"
+
+            if git show-ref --verify --quiet refs/heads/main; then
+              echo "Branch 'main' already exists locally"
+              git checkout main;
+            else
+              git checkout -b main --track origin/main;
+            fi
+
+            export RP_TS=$(git log -1 --format=%cd --date=iso-strict $LATEST_RELEASE_PLZ)
+            export MAIN_TS=$(git log -1 --format=%cd --date=iso-strict main)
+
+            echo "Branch $LATEST_RELEASE_PLZ updated at $RP_TS"
+            echo "Branch main updated at $MAIN_TS"
+
+            if [[ "$RP_TS" > "$MAIN_TS" ]]; then
+              git checkout $LATEST_RELEASE_PLZ
+              BASE_BRANCH=$LATEST_RELEASE_PLZ;
+            else
+              git checkout main
+              BASE_BRANCH=main;
+            fi
+
+          fi
+
+          NEW_BASE_COMMIT=$(git log $BASE_BRANCH -n 1 --format=%H)
+
+          echo "Choosing base branch $BASE_BRANCH because it's newer"
+          echo "$BASE_BRANCH commit ID is $NEW_BASE_COMMIT"
+
+          if [[ "$OLD_BASE_COMMIT" == "$NEW_BASE_COMMIT" && "$GH_EVENT_NAME" == "pull_request" ]]; then
+            echo "Nightly already points to same base; leave unchanged";
+            echo "base-commit=skip" >> "$GITHUB_OUTPUT"
+            exit 0
+          elif [[ "$GH_EVENT_NAME" == "push" ]]; then
+            LATEST_TAG=$(git describe --tags --abbrev=0)
+
+            echo Looking for non-chore commits since last tag
+            echo
+            git --no-pager log --pretty=format:"%s" $LATEST_TAG..HEAD | grep -v "^chore"
+            if [ $? -eq 0 ]; then
+              echo
+              echo "Found non-chore commits; expecting release-plz to create a new release branch."
+              echo "base-commit=skip" >> "$GITHUB_OUTPUT"
+              exit 0
+            else
+              echo "Push to main, but there are only chore commits since last tag."
+              echo "Not expecting a new release branch"
+              echo;
+            fi
+          fi
+
+          echo "Updating nightly base";
+          echo "base-commit=$NEW_BASE_COMMIT" >> "$GITHUB_OUTPUT"
 
-      - name: Bump crate versions
-        run: |
-          sed -i "s/^version = \"[^\"]*\"$/version = \"$VERSION\"/;" sdk/Cargo.toml
-        env:
-          VERSION: ${{ steps.set-nightly-version.outputs.version }}
-
       - name: Install Rust toolchain
+        if: ${{ steps.choose-base.outputs.base-commit != 'skip' }}
         uses: dtolnay/rust-toolchain@stable
         with:
           components: cargo
 
-      - name: Create or update Cargo.lock
+      - name: Add -nightly+(date)-(commit ID) prefix to crate versions
+        id: set-nightly-version
+        if: ${{ steps.choose-base.outputs.base-commit != 'skip' }}
         run: |
+          echo Force updating nightly branch to point to base branch
+          git branch -f nightly
+
+          export NIGHTLY_SUFFIX=$(echo -nightly+`date +%F`-`git rev-parse --short HEAD`)
+          echo Will add nightly suffix $NIGHTLY_SUFFIX
+
+          sed -i "s/^version = \"\\(.*\\)\"/version = \"\\1$NIGHTLY_SUFFIX\"/" sdk/Cargo.toml
+
           cargo update -w
           git add -f Cargo.lock
 
-      - name: Report differences for "prepare (release)" commit
-        run: git diff
+          echo
+          echo Proposed changes:
+          git status
+          git diff
 
       - name: Commit Cargo.toml and Cargo.lock
-        uses: stefanzweifel/git-auto-commit-action@v4
+        uses: stefanzweifel/git-auto-commit-action@v5
         id: commit
+        if: ${{ steps.choose-base.outputs.base-commit != 'skip' }}
         with:
           branch: nightly
           push_options: '--force'
-          commit_message: Prepare ${{ steps.set-nightly-version.outputs.version }} release
+          commit_message: Prepare nightly release
           commit_user_name: Adobe CAI Team
           commit_user_email: noreply@adobe.com
           create_branch: true
-
-  tests:
-    name: Unit tests
-    runs-on: ${{ matrix.os }}
-    needs: nightly-snapshot
-
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [windows-latest, macos-latest, ubuntu-latest]
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v3
-        with:
-          ref: nightly
-
-      - name: Install Rust toolchain
-        uses: dtolnay/rust-toolchain@stable
-
-      - name: Cache Rust dependencies
-        uses: Swatinem/rust-cache@v2
-
-      - name: Run unit tests (cross build)
-        run: cargo test --all-targets --all-features
-
-  test-direct-minimal-versions:
-    name: Unit tests with minimum versions of direct dependencies
-    runs-on: ${{ matrix.os }}
-    needs: nightly-snapshot
-
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [windows-latest, macos-latest, ubuntu-latest]
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v3
-        with:
-          ref: nightly
-
-      - name: Install Rust toolchain
-        uses: dtolnay/rust-toolchain@nightly
-
-      - name: Cache Rust dependencies
-        uses: Swatinem/rust-cache@v2
-
-      - name: Run tests
-        run: cargo +nightly test -Z direct-minimal-versions --all-targets --all-features
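For reference, the `set-nightly-version` step above rewrites `sdk/Cargo.toml` so that a base version such as `0.36.3` becomes a nightly identifier carrying build metadata. A rough Rust equivalent of the suffix logic (the inputs here are hypothetical placeholders; the workflow derives them from `date +%F` and `git rev-parse --short HEAD`):

```rust
fn nightly_version(base: &str, date: &str, short_sha: &str) -> String {
    // Mirrors the shell: -nightly+`date +%F`-`git rev-parse --short HEAD`
    format!("{base}-nightly+{date}-{short_sha}")
}

fn main() {
    // Hypothetical inputs, for illustration only.
    assert_eq!(
        nightly_version("0.36.3", "2024-10-07", "abc1234"),
        "0.36.3-nightly+2024-10-07-abc1234"
    );
}
```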
"Nightly version: $NIGHTLY_VERSION" - env: - VERSION: ${{ steps.set-version.outputs.version }} - NIGHTLY_VERSION: ${{ steps.set-nightly-version.outputs.version }} + echo "Listing branches on remote" + echo + + git --no-pager branch --remote | sed 's|origin/||' + + echo + + git checkout -b nightly --track origin/nightly + OLD_BASE_COMMIT=$(git log nightly --skip=1 -n 1 --format=%H) + + echo "Nightly commit ID is $OLD_BASE_COMMIT" + + LATEST_RELEASE_PLZ=$(git --no-pager branch --format="%(refname:short)" --remote | + sed 's|origin/||' | + grep -E '^release-plz-20' | + sort -r | + head -n 1) + + if [ -z "$LATEST_RELEASE_PLZ" ]; then + + echo "No release-plz branch; using main" + + if git show-ref --verify --quiet refs/heads/main; then + echo "Branch 'main' already exists locally" + git checkout main; + else + git checkout -b main --track origin/main; + fi + + BASE_BRANCH=main; + + else + + echo "Found RP branch $LATEST_RELEASE_PLZ; comparing against main" + + git checkout -b "$LATEST_RELEASE_PLZ" --track "origin/$LATEST_RELEASE_PLZ" + + if git show-ref --verify --quiet refs/heads/main; then + echo "Branch 'main' already exists locally" + git checkout main; + else + git checkout -b main --track origin/main; + fi + + export RP_TS=$(git log -1 --format=%cd --date=iso-strict $LATEST_RELEASE_PLZ) + export MAIN_TS=$(git log -1 --format=%cd --date=iso-strict main) + + echo "Branch $LATEST_RELEASE_PLZ updated at $RP_TS" + echo "Branch main updated at $MAIN_TS" + + if [[ "$RP_TS" > "$MAIN_TS" ]]; then + git checkout $LATEST_RELEASE_PLZ + BASE_BRANCH=$LATEST_RELEASE_PLZ; + else + git checkout main + BASE_BRANCH=main; + fi + + fi + + NEW_BASE_COMMIT=$(git log $BASE_BRANCH -n 1 --format=%H) + + echo "Choosing base branch $BASE_BRANCH because it's newer" + echo "$BASE_BRANCH commit ID is $NEW_BASE_COMMIT" + + if [[ "$OLD_BASE_COMMIT" == "$NEW_BASE_COMMIT" && "$GH_EVENT_NAME" == "pull_request" ]]; then + echo "Nightly already points to same base; leave unchanged"; + echo "base-commit=skip" >> "$GITHUB_OUTPUT" + exit 0 + elif [[ "$GH_EVENT_NAME" == "push" ]]; then + LATEST_TAG=$(git describe --tags --abbrev=0) + + echo Looking for non-chore commits since last tag + echo + git --no-pager log --pretty=format:"%s" $LATEST_TAG..HEAD | grep -v "^chore" + if [ $? -eq 0 ]; then + echo + echo "Found non-chore commits; expecting release-plz to create a new release branch." + echo "base-commit=skip" >> "$GITHUB_OUTPUT" + exit 0 + else + echo "Push to main, but there are only chore commits since last tag." 
+ echo "Not expecting a new release branch" + echo; + fi + fi + + echo "Updating nightly base"; + echo "base-commit=$NEW_BASE_COMMIT" >> "$GITHUB_OUTPUT" - - name: Bump crate versions - run: | - sed -i "s/^version = \"[^\"]*\"$/version = \"$VERSION\"/;" sdk/Cargo.toml - env: - VERSION: ${{ steps.set-nightly-version.outputs.version }} - - name: Install Rust toolchain + if: ${{ steps.choose-base.outputs.base-commit != 'skip' }} uses: dtolnay/rust-toolchain@stable with: components: cargo - - name: Create or update Cargo.lock + - name: Add -nightly+(date)-(commit ID) prefix to crate versions + id: set-nightly-version + if: ${{ steps.choose-base.outputs.base-commit != 'skip' }} run: | + echo Force updating nightly branch to point to base branch + git branch -f nightly + + export NIGHTLY_SUFFIX=$(echo -nightly+`date +%F`-`git rev-parse --short HEAD`) + echo Will add nightly suffix $NIGHTLY_SUFFIX + + sed -i "s/^version = \"\\(.*\\)\"/version = \"\\1$NIGHTLY_SUFFIX\"/" sdk/Cargo.toml + cargo update -w git add -f Cargo.lock - - name: Report differences for "prepare (release)" commit - run: git diff + echo + echo Proposed changes: + git status + git diff - name: Commit Cargo.toml and Cargo.lock - uses: stefanzweifel/git-auto-commit-action@v4 + uses: stefanzweifel/git-auto-commit-action@v5 id: commit + if: ${{ steps.choose-base.outputs.base-commit != 'skip' }} with: branch: nightly push_options: '--force' - commit_message: Prepare ${{ steps.set-nightly-version.outputs.version }} release + commit_message: Prepare nightly release commit_user_name: Adobe CAI Team commit_user_email: noreply@adobe.com create_branch: true - - tests: - name: Unit tests - runs-on: ${{ matrix.os }} - needs: nightly-snapshot - - strategy: - fail-fast: false - matrix: - os: [windows-latest, macos-latest, ubuntu-latest] - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - with: - ref: nightly - - - name: Install Rust toolchain - uses: dtolnay/rust-toolchain@stable - - - name: Cache Rust dependencies - uses: Swatinem/rust-cache@v2 - - - name: Run unit tests (cross build) - run: cargo test --all-targets --all-features - - test-direct-minimal-versions: - name: Unit tests with minimum versions of direct dependencies - runs-on: ${{ matrix.os }} - needs: nightly-snapshot - - strategy: - fail-fast: false - matrix: - os: [windows-latest, macos-latest, ubuntu-latest] - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - with: - ref: nightly - - - name: Install Rust toolchain - uses: dtolnay/rust-toolchain@nightly - - - name: Cache Rust dependencies - uses: Swatinem/rust-cache@v2 - - - name: Run tests - run: cargo +nightly test -Z direct-minimal-versions --all-targets --all-features diff --git a/.github/workflows/pr_title.yml b/.github/workflows/pr_title.yml new file mode 100644 index 000000000..83043ba41 --- /dev/null +++ b/.github/workflows/pr_title.yml @@ -0,0 +1,104 @@ +name: PR title + +on: + pull_request: + types: [opened, synchronize, reopened, edited] + +jobs: + title_cc_validation: + name: Conventional commits validation + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Check PR title + env: + PR_TITLE: ${{ github.event.pull_request.title }} + run: | + # If this step fails, please expand this step for more information. + # + # You will need to revise this pull request's title to + # match the "summary" (first) line of a Conventional Commit message. + # This enables us to automatically generate a meaningful changelog. 
+
+          if echo "$PR_TITLE" | grep -E '^chore(\(.*\))?: release '; then
+            echo "Exception / OK: chore release pattern"
+            exit 0;
+          fi
+
+          if echo "$PR_TITLE" | grep -E '^chore(\(deps\))?: bump '; then
+            echo "Exception / OK: Dependabot update pattern"
+            exit 0;
+          fi
+
+          echo "Installing commitlint-rs. Please wait 30-40 seconds ..."
+          cargo install --quiet commitlint-rs
+          set -e
+
+          echo
+          echo
+          echo --- commitlint results for PR title \"$PR_TITLE\" ---
+          echo
+
+          # Workaround for https://github.com/KeisukeYamashita/commitlint-rs/issues/355
+          if echo "$PR_TITLE" | grep -E "^[a-z]+\(.*\): "; then
+            echo "$PR_TITLE" | commitlint -g .commitlintrc.yml
+          else
+            echo "$PR_TITLE" | commitlint -g .commitlintrc.no-scope.yml
+          fi
+
+          echo "✅ PR title matches all enforced rules."
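The two commitlint configs exist only because of the linked commitlint-rs issue: titles with a scope are checked against `.commitlintrc.yml`, scopeless titles against `.commitlintrc.no-scope.yml`. A sketch of the same dispatch in Rust (again assuming the `regex` crate; the workflow itself does this with `grep`):

```rust
use regex::Regex;

/// Pick the commitlint config the workflow would use for a PR title.
fn config_for(title: &str) -> &'static str {
    // Same pattern the workflow greps for: a lower-case type followed
    // by a parenthesized scope, e.g. "fix(sdk): ...".
    let scoped = Regex::new(r"^[a-z]+\(.*\): ").unwrap();
    if scoped.is_match(title) {
        ".commitlintrc.yml"
    } else {
        ".commitlintrc.no-scope.yml"
    }
}

fn main() {
    assert_eq!(config_for("fix(sdk): Handle RSTn segments"), ".commitlintrc.yml");
    assert_eq!(config_for("doc: Update README"), ".commitlintrc.no-scope.yml");
}
```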
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
deleted file mode 100644
index ec54af2a0..000000000
--- a/.github/workflows/publish.yml
+++ /dev/null
@@ -1,108 +0,0 @@
-name: Publish release
-
-on:
-  workflow_dispatch:
-
-jobs:
-  publish:
-    concurrency: publish-mutex
-    runs-on: ubuntu-latest
-    steps:
-
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-          token: ${{ secrets.GH_ADMIN_COMMIT_TOKEN }}
-
-      - name: Get latest existing tag
-        uses: WyriHaximus/github-action-get-previous-tag@v1
-        id: get-latest-tag
-
-      - name: Set new version
-        uses: paulhatch/semantic-version@v5.2.1
-        id: set-version
-        with:
-          tag_prefix: "v"
-          version_format: "${major}.${minor}.${patch}"
-          major_pattern: "(MAJOR)"
-          minor_pattern: "(MINOR)"
-
-      - name: Generate changelog since last tag
-        id: generate-changelog
-        run: |
-          {
-            echo 'changelog<> "$GITHUB_OUTPUT"
-
-      - name: Log version & changelog
-        run: |
-          echo "Version: $VERSION"
-          echo "Version tag: $VERSION_TAG"
-          echo "Latest tag detected: $LATEST_TAG"
-          echo "Changelog: $CHANGELOG"
-        env:
-          VERSION: ${{ steps.set-version.outputs.version }}
-          VERSION_TAG: ${{ steps.set-version.outputs.version_tag }}
-          LATEST_TAG: ${{ steps.get-latest-tag.outputs.tag }}
-          CHANGELOG: ${{ steps.generate-changelog.outputs.changelog }}
-
-      - name: Prevent empty release
-        if: ${{ steps.generate-changelog.outputs.changelog == '' }}
-        uses: actions/github-script@v3
-        with:
-          script: |
-            core.setFailed("No changes since prior release")
-
-      - name: Update changelog
-        run: |
-          (head -8 CHANGELOG.md && echo "## $VERSION" && date "+_%d %B %Y_" && echo "" && (echo "$CHANGELOG" | sed -E 's_\(#([0-9]+)\)_([#\1](https://github.com/contentauth/c2pa-rs/pull/\1)\)_') && echo "" && tail -n +9 CHANGELOG.md) > CHANGELOG.new.md
-          mv CHANGELOG.new.md CHANGELOG.md
-        env:
-          VERSION: ${{ steps.set-version.outputs.version }}
-          CHANGELOG: ${{ steps.generate-changelog.outputs.changelog }}
-
-      - name: Install Rust toolchain
-        uses: dtolnay/rust-toolchain@stable
-
-      - name: Bump crate versions
-        run: |
-          sed -i "s/^version = \"[^\"]*\"$/version = \"$VERSION\"/;" sdk/Cargo.toml
-          sed -i "s/^version = \"[^\"]*\"$/version = \"$VERSION\"/;" export_schema/Cargo.toml
-          sed -i "s/^version = \"[^\"]*\"$/version = \"$VERSION\"/;" make_test_images/Cargo.toml
-          sed -i "s/^c2pa = \"[^\"]*\"$/c2pa = \"$VERSION\"/;" README.md
-        env:
-          VERSION: ${{ steps.set-version.outputs.version }}
-
-      - name: Report differences for "prepare (release)" commit
-        run: git diff
-
-      - name: Commit Cargo.toml and changelog
-        uses: stefanzweifel/git-auto-commit-action@v4
-        id: commit
-        with:
-          commit_message: Prepare ${{ steps.set-version.outputs.version }} release
-          commit_user_name: Adobe CAI Team
-          commit_user_email: noreply@adobe.com
-
-      - name: Ensure semantic versioning requirements are met
-        uses: obi1kenobi/cargo-semver-checks-action@v2
-        with:
-          package: c2pa
-
-      - name: Create release
-        uses: ncipollo/release-action@v1
-        with:
-          body: ${{ steps.generate-changelog.outputs.changelog }}
-          commit: ${{ steps.commit.outputs.commit_hash }}
-          prerelease: true # remove at 1.0
-          tag: ${{ steps.set-version.outputs.version_tag }}
-          token: ${{ secrets.GH_ADMIN_COMMIT_TOKEN }}
-
-      - name: Publish crate
-        run: |
-          cargo publish --token $CRATES_IO_SECRET -p c2pa
-        env:
-          CRATES_IO_SECRET: ${{ secrets.CRATES_IO_SECRET }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 000000000..34b0be483
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,40 @@
+name: Release-plz
+
+permissions:
+  pull-requests: write
+  contents: write
+
+on:
+  push:
+    branches:
+      - main
+
+jobs:
+  release-plz:
+    name: Release-plz
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          token: ${{ secrets.RELEASE_PLZ_TOKEN }}
+
+      - name: Install Rust toolchain
+        uses: dtolnay/rust-toolchain@stable
+
+      - name: Run release-plz
+        uses: MarcoIeni/release-plz-action@v0.5
+        env:
+          GITHUB_TOKEN: ${{ secrets.RELEASE_PLZ_TOKEN }}
+          CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_IO_SECRET }}
+
+      - name: Clean up stale release-plz branches
+        run: |
+          git --no-pager branch --remote |\
+            grep 'origin/release-plz-' |\
+            sort -r |\
+            tail -n +2 |\
+            sed 's/origin\///' |\
+            xargs -I {} git push origin --delete {}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 54258de9c..cb9ee3b1e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,10 +1,123 @@
 # Changelog
 
-All changes to this project are documented in this file.
+All notable changes to this project will be documented in this file.
 
-This project adheres to [Semantic Versioning](https://semver.org), except that – as is typical in the Rust community – the minimum supported Rust version may be increased without a major version increase.
+This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html), except that – as is typical in the Rust community – the minimum supported Rust version may be increased without a major version increase.
 
-Do not manually edit this file. It will be automatically updated when a new release is published.
+Since version 0.36.2, the format of this changelog is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+
+## [0.36.3](https://github.com/contentauth/c2pa-rs/compare/v0.36.2...v0.36.3)
+_07 October 2024_
+
+### Fixed
+
+* Changelog contained duplicate entries for 0.16.1 ([#618](https://github.com/contentauth/c2pa-rs/pull/618))
+
+## [0.36.2](https://github.com/contentauth/c2pa-rs/compare/v0.36.1...v0.36.2)
+_07 October 2024_
+
+* No-op change to start using release-plz to manage releases
+
+## 0.36.1
+_04 October 2024_
+
+* fix: Make sure algorithm is being respected in data_hash.rs ([#613](https://github.com/contentauth/c2pa-rs/pull/613))
+* fix: Make sure RSTn segment names are included in the data box hashes list ([#612](https://github.com/contentauth/c2pa-rs/pull/612))
+* Update mp4 requirement from 0.13.0 to 0.14.0 in /sdk ([#595](https://github.com/contentauth/c2pa-rs/pull/595))
+* chore: Add m4a test ([#606](https://github.com/contentauth/c2pa-rs/pull/606))
+* fix: Write absolute urls to manifest store resource references. ([#603](https://github.com/contentauth/c2pa-rs/pull/603))
+* doc: Removes xmp_write feature from README.md.
+* chore: Remove deprecated actions-rs/clippy-check action ([#601](https://github.com/contentauth/c2pa-rs/pull/601))
+* chore: bump stefanzweifel/git-auto-commit-action from 4 to 5 ([#600](https://github.com/contentauth/c2pa-rs/pull/600))
+* chore: bump actions/github-script from 3 to 7 ([#599](https://github.com/contentauth/c2pa-rs/pull/599))
+* chore: bump paulhatch/semantic-version from 5.2.1 to 5.4.0 ([#598](https://github.com/contentauth/c2pa-rs/pull/598))
+* chore: Use Dependabot to upgrade GitHub Actions steps ([#597](https://github.com/contentauth/c2pa-rs/pull/597))
+* chore: Fix dependabot issues ([#594](https://github.com/contentauth/c2pa-rs/pull/594))
+* Fixes issue where nested assertion-uri-hash errors were being reported at the active manifest level. ([#593](https://github.com/contentauth/c2pa-rs/pull/593))
+* Feature/add content length to tsa request ([#587](https://github.com/contentauth/c2pa-rs/pull/587))
+
+## 0.36.0
+_23 September 2024_
+
+* (MINOR) ensures release bumps minor version ([#592](https://github.com/contentauth/c2pa-rs/pull/592))
+* fix: requires "StatusTracker" to implement "Send" ([#589](https://github.com/contentauth/c2pa-rs/pull/589))
+
+## 0.35.1
+_17 September 2024_
+
+* Fix error when trying to sign BMFF content with Builder. ([#582](https://github.com/contentauth/c2pa-rs/pull/582))
+
+## 0.35.0
+_12 September 2024_
+
+* upgrades to riff@2.0.0, preventing panic on invalid riff files ([#579](https://github.com/contentauth/c2pa-rs/pull/579))
+* EC signature DER support ([#581](https://github.com/contentauth/c2pa-rs/pull/581))
+* Update base64 requirement from 0.21.2 to 0.22.1 in /sdk ([#519](https://github.com/contentauth/c2pa-rs/pull/519))
+* (MINOR) Rust API enhancements and fixes. ([#575](https://github.com/contentauth/c2pa-rs/pull/575))
+* Fix GIF off by one with XMP ([#562](https://github.com/contentauth/c2pa-rs/pull/562))
+
+## 0.34.0
+_30 August 2024_
+
+* (MINOR) Fragmented BMFF media ([#572](https://github.com/contentauth/c2pa-rs/pull/572))
+
+## 0.33.4
+_29 August 2024_
+
+* Depend on url crate version 2.5.2 or newer ([#573](https://github.com/contentauth/c2pa-rs/pull/573))
+
+## 0.33.3
+_17 August 2024_
+
+* Inline certs for wasm test signer ([#564](https://github.com/contentauth/c2pa-rs/pull/564))
+
+## 0.33.2
+_15 August 2024_
+
+* Bmff write fix ([#552](https://github.com/contentauth/c2pa-rs/pull/552))
+* Fix remote embedding RIFF when specifying mime type ([#551](https://github.com/contentauth/c2pa-rs/pull/551))
+* Fix data hash out of bounds when using placeholder beyond stream length ([#546](https://github.com/contentauth/c2pa-rs/pull/546))
+* Adds embeddable apis and remote_url/no_embed options ([#537](https://github.com/contentauth/c2pa-rs/pull/537))
+* export_schema: add unstable_api feature ([#542](https://github.com/contentauth/c2pa-rs/pull/542))
+* Ingredient checks ([#529](https://github.com/contentauth/c2pa-rs/pull/529))
+* Add base_path field to Builder ([#539](https://github.com/contentauth/c2pa-rs/pull/539))
+* Export `AssertionDefinition` and `ActionTemplate` in public API ([#522](https://github.com/contentauth/c2pa-rs/pull/522))
+
+## 0.33.1
+_30 July 2024_
+
+* Use timestamp with OpenSSL validation to prevent check chain check er… ([#531](https://github.com/contentauth/c2pa-rs/pull/531))
+* Fix GIF `remove_cai_store_from_stream` behavior ([#524](https://github.com/contentauth/c2pa-rs/pull/524))
+
+## 0.33.0
+_26 July 2024_
+
+* Update crate to fix bad certificate dump content ([#525](https://github.com/contentauth/c2pa-rs/pull/525))
+* Introduce a mutex around the FFI calls to OpenSSL ([#516](https://github.com/contentauth/c2pa-rs/pull/516))
+* Bump bcder minimum version to 0.7.3 ([#526](https://github.com/contentauth/c2pa-rs/pull/526))
+* (MINOR) Updates needed for v2 JavaScript SDK ([#521](https://github.com/contentauth/c2pa-rs/pull/521))
+* Add region of interest assertion definition ([#506](https://github.com/contentauth/c2pa-rs/pull/506))
+* Fix CI tests ([#520](https://github.com/contentauth/c2pa-rs/pull/520))
+* Builder Archive update ([#507](https://github.com/contentauth/c2pa-rs/pull/507))
+* Update range-set requirement from 0.0.9 to 0.0.11 in /sdk ([#442](https://github.com/contentauth/c2pa-rs/pull/442))
+* Make sure reading past end of JUMBF box is an error ([#518](https://github.com/contentauth/c2pa-rs/pull/518))
+* added final details ([#517](https://github.com/contentauth/c2pa-rs/pull/517))
+
+## 0.32.7
+_18 July 2024_
+
+* Ensure Ingredient data_types make it to the store and back. ([#514](https://github.com/contentauth/c2pa-rs/pull/514))
+* draft security md ([#508](https://github.com/contentauth/c2pa-rs/pull/508))
+* Make data_types field optional when serializing data-box-map ([#512](https://github.com/contentauth/c2pa-rs/pull/512))
+* Fix box hash placeholder len (set to 1) ([#511](https://github.com/contentauth/c2pa-rs/pull/511))
+* Set data box placeholder len to at least 1 for GIF ([#510](https://github.com/contentauth/c2pa-rs/pull/510))
+* Rewind mp3 streams when reading/writing ([#509](https://github.com/contentauth/c2pa-rs/pull/509))
+* Update README.md ([#351](https://github.com/contentauth/c2pa-rs/pull/351))
+* Add GIF support ([#489](https://github.com/contentauth/c2pa-rs/pull/489))
+* Update image requirement from 0.24.7 to 0.25.1 in /make_test_images ([#445](https://github.com/contentauth/c2pa-rs/pull/445))
+* Upgrade uuid to 1.7.0 & fix removed wasm-bindgen feature ([#450](https://github.com/contentauth/c2pa-rs/pull/450))
+* Expose `SignatureInfo` publicly ([#501](https://github.com/contentauth/c2pa-rs/pull/501))
+* Cleanup empty/unused files + lints ([#500](https://github.com/contentauth/c2pa-rs/pull/500))
 
 ## 0.32.6
 _15 July 2024_
@@ -360,13 +473,6 @@ _22 February 2023_
 _19 December 2022_
 
 * Update xmp-toolkit from 0.6.0 to 1.0.0 ([#165](https://github.com/contentauth/c2pa-rs/pull/165))
-* Prepare 0.16.1 release
-* Address new Clippy warnings for Rust 1.66 ([#164](https://github.com/contentauth/c2pa-rs/pull/164))
-* Create external manifests for unknown types ([#162](https://github.com/contentauth/c2pa-rs/pull/162))
-
-## 0.16.1
-_19 December 2022_
-
 * Address new Clippy warnings for Rust 1.66 ([#164](https://github.com/contentauth/c2pa-rs/pull/164))
 * Create external manifests for unknown types ([#162](https://github.com/contentauth/c2pa-rs/pull/162))
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index a74f7b447..7b2879147 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -78,5 +78,5 @@ feel free to reach out to existing committers to have a conversation about that
 
 ## Security issues
 
-Security issues shouldn't be reported on this issue tracker. Instead,
-[file an issue to our security experts](https://helpx.adobe.com/security/alertus.html).
+Do not create a public GitHub issue for any suspected security vulnerabilities. Instead, please file an issue through [Adobe's HackerOne page](https://hackerone.com/adobe?type=team).
+For more information on reporting security issues, see [SECURITY.md](SECURITY.md).
diff --git a/README.md b/README.md
index 116e63e82..15a262323 100644
--- a/README.md
+++ b/README.md
@@ -23,13 +23,28 @@ The library supports several common C2PA [assertions](https://c2pa.org/specifica
 
 This is a beta release (version 0.x.x) of the project. The minor version number (0.x.0) is incremented when there are breaking API changes, which may happen frequently.
 
+### New API
+
+The library has a new API in development that will eventually replace the existing methods of reading and writing C2PA data. Ultimately, it will support all language bindings and build environments.
+To use this API, enable the `unstable_api` feature; for example:
+
+```
+c2pa = {version="0.33.1", features=["unstable_api"]}
+```
+
+The new API focuses on streaming I/O and supports the following structs:
+- [Builder](https://docs.rs/c2pa/latest/c2pa/struct.Builder.html)
+- [Reader](https://docs.rs/c2pa/latest/c2pa/struct.Reader.html)
+- [ManifestDefinition](https://docs.rs/c2pa/latest/c2pa/struct.ManifestDefinition.html)
+
+For some informal development and usage notes, see [2024_API_NOTES.md](https://github.com/contentauth/c2pa-rs/blob/main/2024_API_NOTES.md).
+
 ### Contributions and feedback
 
 We welcome contributions to this project. For information on contributing, providing feedback, and about ongoing work, see [Contributing](https://github.com/contentauth/c2pa-js/blob/main/CONTRIBUTING.md).
 
 ## Requirements
 
-The library requires **Rust version 1.74.0** or newer.
+The library requires **Rust version 1.76.0** or newer.
 
 ### Supported platforms
@@ -68,7 +83,7 @@ Add this to your `Cargo.toml`:
 
 ```toml
 [dependencies]
-c2pa = "0.32.6"
+c2pa = "0.36.1"
 ```
 
 If you want to read or write a manifest file, add the `file_io` dependency to your `Cargo.toml`.
@@ -87,12 +102,11 @@ The Rust library crate provides:
 
 * `file_io` enables manifest generation, signing via OpenSSL, and embedding manifests in various file formats.
 * `add_thumbnails` will generate thumbnails automatically for JPEG and PNG files. (no longer included with `file_io`)
 * `serialize_thumbnails` includes binary thumbnail data in the [Serde](https://serde.rs/) serialization output.
-* `xmp_write` enables updating XMP on embed with the `dcterms:provenance` field. (Requires [xmp_toolkit](https://crates.io/crates/xmp_toolkit).)
 * `no_interleaved_io` forces fully-synchronous I/O; otherwise, the library uses threaded I/O for some operations to improve performance.
 * `fetch_remote_manifests` enables the verification step to retrieve externally referenced manifest stores. External manifests are only fetched if there is no embedded manifest store and no locally adjacent .c2pa manifest store file of the same name.
 * `json_schema` is used by `make schema` to produce a JSON schema document that represents the `ManifestStore` data structures.
-* `psxxx_ocsp_stapling_experimental` is a demonstration feature that attempts to fetch the OCSP data from the OCSP responders listed in the manifest signing certificate. The response becomes part of the manifest and is used to prove the certificate was not revoked at the time of signing. This is only implemented for PS256, PS384, and PS512 signatures and is intended as a demonstration.
-
+* `psxxx_ocsp_stapling_experimental` is a demonstration feature that attempts to fetch the OCSP data from the OCSP responders listed in the manifest signing certificate. The response becomes part of the manifest and is used to prove the certificate was not revoked at the time of signing. This is only implemented for PS256, PS384, and PS512 signatures and is intended as a demonstration.
+* `openssl_ffi_mutex` prevents multiple threads from accessing the C OpenSSL library simultaneously. (This library is not re-entrant.) In a multi-threaded process (such as Cargo's test runner), this can lead to unpredictable behavior.
 
 ## Example code
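A minimal end-to-end sketch of the new API described in the README section above, condensed from the `sdk/examples/client` changes later in this diff (paths and certificates are illustrative; this assumes the `unstable_api` feature plus a file-based signing setup such as the `file_io`/OpenSSL one the example uses):

```rust
use c2pa::{create_signer, Builder, ClaimGeneratorInfo, Reader, SigningAlg};

fn main() -> c2pa::Result<()> {
    // Build a manifest with the streaming-oriented Builder.
    let mut builder = Builder::new();
    builder.set_claim_generator_info(ClaimGeneratorInfo::new("test_app/0.1"));

    // Illustrative certificate/key paths, borrowed from the example code in this PR.
    let signer = create_signer::from_files(
        "sdk/tests/fixtures/certs/es256.pub",
        "sdk/tests/fixtures/certs/es256.pem",
        SigningAlg::Es256,
        None,
    )?;

    // Sign and embed the manifest, then read the manifest store back.
    builder.sign_file(&*signer, "source.jpg", "output.jpg")?;
    let reader = Reader::from_file("output.jpg")?;
    println!("{reader}");
    Ok(())
}
```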
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 000000000..08811c0fe
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,22 @@
+# Security
+
+This C2PA open-source library is maintained in partnership with Adobe. At this time, Adobe is taking point on accepting security reports through its HackerOne portal and public bug bounty program.
+
+## Reporting a vulnerability
+
+Please do not create a public GitHub issue for any suspected security vulnerabilities. Instead, please file an issue through [Adobe's HackerOne page](https://hackerone.com/adobe?type=team). If for some reason this is not possible, reach out to cai-security@adobe.com.
+
+
+## Vulnerability SLAs
+
+Once we receive an actionable vulnerability (meaning there is an available patch, or a code fix is required), we will acknowledge the vulnerability within 24 hours. Our target SLAs for resolution are:
+
+1. 72 hours for vulnerabilities with a CVSS score of 9.0-10.0
+2. 2 weeks for vulnerabilities with a CVSS score of 7.0-8.9
+
+Any vulnerability with a CVSS score below 7.0 will be resolved when possible.
+
+
+## C2PA vulnerabilities
+
+This library is not meant to address any potential vulnerabilities within the C2PA specification itself. It is only an implementation of the spec as written. Any suspected vulnerabilities within the spec can be reported [here](https://github.com/c2pa-org/specifications/issues).
diff --git a/export_schema/Cargo.toml b/export_schema/Cargo.toml
index e8947552b..f5baf9e3b 100644
--- a/export_schema/Cargo.toml
+++ b/export_schema/Cargo.toml
@@ -1,13 +1,13 @@
 [package]
 name = "export_schema"
-version = "0.32.6"
+version = "0.36.1"
 authors = ["Dave Kozma "]
 license = "MIT OR Apache-2.0"
 edition = "2018"
-rust-version = "1.74.0"
+rust-version = "1.76.0"
 
 [dependencies]
 anyhow = "1.0.40"
-c2pa = { path = "../sdk", features = ["file_io", "json_schema"] }
-schemars = "0.8.13"
-serde_json = "1.0.114"
+c2pa = { path = "../sdk", features = ["json_schema", "unstable_api"] }
+schemars = "0.8.21"
+serde_json = "1.0.117"
diff --git a/export_schema/src/main.rs b/export_schema/src/main.rs
index ad41a95b1..b1bdb25f9 100644
--- a/export_schema/src/main.rs
+++ b/export_schema/src/main.rs
@@ -1,19 +1,31 @@
 use std::{fs, path::Path};
 
 use anyhow::Result;
-use c2pa::ManifestStore;
-use schemars::gen::SchemaSettings;
+use c2pa::{settings::Settings, Builder, ManifestDefinition, ManifestStore};
+use schemars::{schema::RootSchema, schema_for};
 
-fn main() -> Result<()> {
-    println!("Exporting JSON schema");
-    let settings = SchemaSettings::draft07();
-    let gen = settings.into_generator();
-    let schema = gen.into_root_schema_for::<ManifestStore>();
-    let output = serde_json::to_string_pretty(&schema).expect("Failed to serialize schema");
+fn write_schema(schema: &RootSchema, name: &str) {
+    println!("Exporting JSON schema for {}", name);
+    let output = serde_json::to_string_pretty(schema).expect("Failed to serialize schema");
     let output_dir = Path::new("./target/schema");
     fs::create_dir_all(output_dir).expect("Could not create schema directory");
-    let output_path = output_dir.join("ManifestStore.schema.json");
+    let output_path = output_dir.join(format!("{}.schema.json", name));
     fs::write(&output_path, output).expect("Unable to write schema");
     println!("Wrote schema to {}", output_path.display());
+}
+
+fn main() -> Result<()> {
+    let builder = schema_for!(Builder);
+    write_schema(&builder, "Builder");
+
+    let manifest_definition = schema_for!(ManifestDefinition);
+    write_schema(&manifest_definition, "ManifestDefinition");
+
+    let manifest_store = schema_for!(ManifestStore);
+    write_schema(&manifest_store, "ManifestStore");
+
+    let settings = schema_for!(Settings);
+    write_schema(&settings, "Settings");
+
     Ok(())
 }
diff --git a/make_test_images/Cargo.toml b/make_test_images/Cargo.toml
index 741265794..d1e9599ce 100644
--- a/make_test_images/Cargo.toml
+++ b/make_test_images/Cargo.toml
@@ -1,20 +1,22 @@
 [package]
 name = "make_test_images"
-version = "0.32.6"
+version = "0.36.1"
 authors = ["Gavin Peacock "]
 license = "MIT OR Apache-2.0"
 edition = "2021"
-rust-version = "1.74.0"
+rust-version = "1.76.0"
 
 [dependencies]
 anyhow = "1.0.40"
 c2pa = { path = "../sdk", default-features = false, features = [
-    "openssl",
+    "file_io",
+    "openssl_sign",
     "unstable_api",
 ] }
 env_logger = "0.11"
-log = "0.4.8"
-image = { version = "0.25.1", default-features = false, features = [
+log = "0.4.8"
+image = { version = "0.25.2", default-features = false, features = [
     "jpeg",
     "png",
 ] }
@@ -22,5 +24,5 @@ memchr = "2.7.1"
 nom = "7.1.3"
 regex = "1.5.6"
 serde = "1.0.197"
-serde_json = { version = "1.0.114", features = ["preserve_order"] }
+serde_json = { version = "1.0.117", features = ["preserve_order"] }
 tempfile = "3.10.1"
diff --git a/make_test_images/src/compare_manifests.rs b/make_test_images/src/compare_manifests.rs
index 823a4f766..731219539 100644
--- a/make_test_images/src/compare_manifests.rs
+++ b/make_test_images/src/compare_manifests.rs
@@ -183,7 +183,10 @@ fn compare_json_values(
         || path.ends_with(".instanceId")
         || path.ends_with(".time")
         || path.contains(".hash")
-        || val1.is_string() && val2.is_string() && val1.to_string().contains(":urn:uuid:"))
+        || val1.is_string()
+            && val2.is_string()
+            && (val1.to_string().contains(":urn:uuid:")
+                || val2.to_string().contains(":urn:uuid:")))
     {
         if val2.is_null() {
             issues.push(format!("Missing {}: {}", path, val1));
diff --git a/make_test_images/src/make_test_images.rs b/make_test_images/src/make_test_images.rs
index 3af2281f1..303b84e24 100644
--- a/make_test_images/src/make_test_images.rs
+++ b/make_test_images/src/make_test_images.rs
@@ -23,7 +23,7 @@ use anyhow::{Context, Result};
 use c2pa::{
     create_signer,
     jumbf_io::{get_supported_types, load_jumbf_from_stream, save_jumbf_to_stream},
-    Builder, Error, Reader, Signer, SigningAlg,
+    Builder, Error, Ingredient, Reader, Relationship, Signer, SigningAlg,
 };
 use memchr::memmem;
 use nom::AsBytes;
@@ -208,7 +208,7 @@ impl MakeTestImages {
     fn add_ingredient_from_file(
         builder: &mut Builder,
         path: &Path,
-        relationship: &str,
+        relationship: Relationship,
     ) -> Result<String> {
         let mut source = fs::File::open(path).context("opening ingredient")?;
         let name = path
@@ -222,20 +222,18 @@ impl MakeTestImages {
             .into_owned();
         let format = extension_to_mime(&extension).unwrap_or("image/jpeg");
 
-        let json = json!({
-            "title": name,
-            "relationship": relationship,
-        })
-        .to_string();
-
-        let ingredient = builder.add_ingredient(&json, format, &mut source)?;
-        if ingredient.thumbnail_ref().is_none() {
+        let mut parent = Ingredient::from_stream(format, &mut source)?;
+        parent.set_relationship(relationship);
+        parent.set_title(name);
+        if parent.thumbnail_ref().is_none() {
             source.rewind()?;
             let (format, thumbnail) =
                 make_thumbnail_from_stream(format, &mut source).context("making thumbnail")?;
-            ingredient.set_thumbnail(format, thumbnail)?;
+            parent.set_thumbnail(format, thumbnail)?;
         }
+        builder.add_ingredient(parent);
+
         Ok(
             builder.definition.ingredients[builder.definition.ingredients.len() - 1]
                 .instance_id()
@@ -300,7 +298,7 @@ impl MakeTestImages {
             let src_path = &self.make_path(src);
 
             let instance_id =
-                Self::add_ingredient_from_file(&mut builder, src_path, "parentOf")?;
+                Self::add_ingredient_from_file(&mut builder, src_path, Relationship::ParentOf)?;
 
             actions.push(json!(
                 {
@@ -376,8 +374,11 @@ impl MakeTestImages {
                 let instance_id = match ingredient_table.get(ing.as_str()) {
                     Some(id) => id.to_string(),
                     None => {
-                        let instance_id =
-                            Self::add_ingredient_from_file(&mut builder, ing_path, "componentOf")?;
+                        let instance_id = Self::add_ingredient_from_file(
+                            &mut builder,
+                            ing_path,
+                            Relationship::ComponentOf,
+                        )?;
                         ingredient_table.insert(ing, instance_id.clone());
                         instance_id
                     }
                 };
@@ -490,7 +491,7 @@ impl MakeTestImages {
         let mut builder = Builder::from_json(&json)?;
         let parent_name = file_name(&dst_path).ok_or(Error::BadParam("no filename".to_string()))?;
 
-        builder.add_ingredient(
+        builder.add_ingredient_from_stream(
             json!({
                 "title": parent_name,
                 "relationship": "parentOf"
diff --git a/make_test_images/src/make_thumbnail.rs b/make_test_images/src/make_thumbnail.rs
index b59de4611..ed226be61 100644
--- a/make_test_images/src/make_thumbnail.rs
+++ b/make_test_images/src/make_thumbnail.rs
@@ -14,7 +14,7 @@ use std::io::{Read, Seek};
 
 use anyhow::{Error, Result};
-use image::{io::Reader, ImageFormat};
+use image::{ImageFormat, ImageReader};
 
 // max edge size allowed in pixels for thumbnail creation
 const THUMBNAIL_LONGEST_EDGE: u32 = 1024;
@@ -29,7 +29,7 @@ pub fn make_thumbnail_from_stream(
         .or_else(|| ImageFormat::from_mime_type(format))
         .ok_or(Error::msg(format!("format not supported {format}")))?;
 
-    let reader = Reader::with_format(std::io::BufReader::new(stream), format);
+    let reader = ImageReader::with_format(std::io::BufReader::new(stream), format);
     let mut img = reader.decode()?;
 
     let longest_edge = THUMBNAIL_LONGEST_EDGE;
diff --git a/make_test_images/tests.json b/make_test_images/tests.json
index 79e0ba341..bd01f3e09 100644
--- a/make_test_images/tests.json
+++ b/make_test_images/tests.json
@@ -25,6 +25,8 @@
     { "op": "uri", "parent": "CA", "output": "E-uri-CA" },
     { "op": "clm", "parent": "CAICAI", "output": "E-clm-CAICAI" },
     { "op": "make", "ingredients": ["E-sig-CA"], "output": "CIE-sig-CA" },
+    { "op": "make", "ingredients": ["E-uri-CA"], "output": "CAE-uri-CA" },
+    { "op": "make", "ingredients": ["CAE-uri-CA"], "output": "CACAE-uri-CA" },
     { "op": "uri", "parent": "CIE-sig-CA", "output": "E-uri-CIE-sig-CA" },
     { "op": "make", "parent": "A.jpg", "ingredients": ["C", "A.jpg", "I.jpg", "CA", "CI", "CAI", "CICA"], "output": "CAIAIIICAICIICAIICICA" }
   ]
diff --git a/release-plz.toml b/release-plz.toml
new file mode 100644
index 000000000..37149073c
--- /dev/null
+++ b/release-plz.toml
@@ -0,0 +1,57 @@
+[changelog]
+body = """
+
+## [{{ version | trim_start_matches(pat="v") }}]{%- if release_link -%}({{ release_link }}){% endif %}
+_{{ timestamp | date(format="%d %B %Y") }}_
+{% for group, commits in commits | group_by(attribute="group") -%}
+{%- if group != "chore" %}
+### {{ group | upper_first }}
+
+{% for commit in commits -%}
+{%- if commit.scope and commit.scope != package -%}
+* *({{commit.scope}})* {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message | upper_first }}{%- if commit.links %} ({% for link in commit.links %}[{{link.text}}]({{link.href}}) {% endfor -%}){% endif %}
+{% else -%}
+* {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message | upper_first }}
+{% endif -%}
+{%- endfor %}
+
+{%- endif %}
+
+{%- endfor -%}
+"""
+
+commit_parsers = [
+    { message = "^feat", group = "added" },
+    { message = "^changed", group = "changed" },
+    { message = "^deprecated", group = "deprecated" },
+    { message = "^fix", group = "fixed" },
+    { message = "^security", group = "security" },
+    { message = "^chore", group = "chore" },
+    { message = "^update", group = "updated dependencies" },
+    { message = "^doc", group = "documented" },
+    { message = "^.*", group = "other" },
+]
+
+[workspace]
+dependencies_update = true
+features_always_increment_minor = true
+pr_labels = ["release"]
+release_always = false
+release_commits = "^(feat|fix|update|doc)[(:]"
+
+[[package]]
+name = "c2pa"
+changelog_path = "./CHANGELOG.md"
+# This being the most important crate in the workspace,
+# we will leave the changelog in the repo root instead
+# of in the `sdk` folder (which would be the default).
+
+[[package]]
+name = "export_schema"
+publish = false
+release = false
+
+[[package]]
+name = "make_test_images"
+publish = false
+release = false
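The `commit_parsers` table above maps Conventional Commit types to changelog headings (compare the `### Fixed` section in the 0.36.3 entry earlier in this diff). A toy Rust rendering of the same mapping, for illustration only; release-plz evaluates these regexes itself:

```rust
/// Map a commit summary line to the changelog group defined
/// by `commit_parsers` in release-plz.toml above.
fn changelog_group(summary: &str) -> &'static str {
    match summary {
        s if s.starts_with("feat") => "added",
        s if s.starts_with("changed") => "changed",
        s if s.starts_with("deprecated") => "deprecated",
        s if s.starts_with("fix") => "fixed",
        s if s.starts_with("security") => "security",
        s if s.starts_with("chore") => "chore", // skipped by the template
        s if s.starts_with("update") => "updated dependencies",
        s if s.starts_with("doc") => "documented",
        _ => "other",
    }
}

fn main() {
    assert_eq!(changelog_group("fix: Make sure algorithm is respected"), "fixed");
    assert_eq!(changelog_group("chore: bump actions/checkout from 3 to 4"), "chore");
}
```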
diff --git a/sdk/Cargo.toml b/sdk/Cargo.toml
index 924be3c03..ff5cc2403 100644
--- a/sdk/Cargo.toml
+++ b/sdk/Cargo.toml
@@ -1,13 +1,14 @@
 [package]
 name = "c2pa"
-version = "0.32.6"
+version = "0.36.3"
 description = "Rust SDK for C2PA (Coalition for Content Provenance and Authenticity) implementors"
 authors = [
-    "Maurice Fisher ",
-    "Gavin Peacock ",
-    "Eric Scouten ",
-    "Leonard Rosenthol ",
-    "Dave Kozma ",
+    "Maurice Fisher ",
+    "Gavin Peacock ",
+    "Eric Scouten ",
+    "Leonard Rosenthol ",
+    "Dave Kozma ",
+    "Dylan Ross "
 ]
 license = "MIT OR Apache-2.0"
 documentation = "https://docs.rs/c2pa"
@@ -17,7 +18,7 @@ readme = "../README.md"
 keywords = ["xmp", "metadata"]
 categories = ["api-bindings"]
 edition = "2021"
-rust-version = "1.74.0"
+rust-version = "1.76.0"
 exclude = ["tests/fixtures"]
 
 [package.metadata.docs.rs]
@@ -37,6 +38,7 @@ json_schema = ["dep:schemars"]
 pdf = ["dep:lopdf"]
 v1_api = []
 unstable_api = []
+openssl_ffi_mutex = []
 
 # The diagnostics feature is unsupported and might be removed.
 # It enables some low-overhead timing features used in our development cycle.
@@ -65,104 +67,107 @@ crate-type = ["lib"]
 
 [dependencies]
 asn1-rs = "0.5.2"
 async-generic = "1.1"
+async-recursion = "1.1.1"
 async-trait = { version = "0.1.77" }
 atree = "0.5.2"
-base64 = "0.21.2"
-bcder = "0.7.1"
-bytes = "1.4.0"
+base64 = "0.22.1"
+bcder = "0.7.3"
+bytes = "1.7.2"
 byteorder = { version = "1.4.3", default-features = false }
 byteordered = "0.6.0"
-chrono = { version = "0.4.27", default-features = false, features = [
-    "serde",
-    "wasmbind",
+chrono = { version = "0.4.38", default-features = false, features = [
+    "serde",
+    "wasmbind",
 ] }
 ciborium = "0.2.0"
 config = { version = "0.14.0", default-features = false, features = [
-    "json",
-    "json5",
-    "toml",
-    "ron",
-    "ini",
+    "json",
+    "json5",
+    "toml",
+    "ron",
+    "ini",
 ] }
 conv = "0.3.3"
 coset = "0.3.1"
 extfmt = "0.1.1"
+ed25519-dalek = "2.1.1"
 fast-xml = "0.23.1"
 hex = "0.4.3"
 # Version 1.13.0 doesn't compile under Rust < 1.75, pinning to 1.12.0
-id3 = "=1.12.0"
+id3 = "=1.14.0"
 img-parts = "0.3.0"
 jfifdump = "0.5.1"
 log = "0.4.8"
 lopdf = { version = "0.31.0", optional = true }
 lazy_static = "1.4.0"
 memchr = "2.7.1"
-multibase = "0.9.0"
-multihash = "0.11.4"
-mp4 = "0.13.0"
+mp4 = "0.14.0"
 pem = "3.0.2"
 png_pong = "0.9.1"
 rand = "0.8.5"
 rand_chacha = "0.3.1"
-range-set = "0.0.9"
-rasn-ocsp = "0.12.5"
-rasn-pkix = "0.12.5"
-rasn = "0.12.5"
-riff = "1.0.1"
-schemars = { version = "0.8.13", optional = true }
+range-set = "0.0.11"
+rasn-ocsp = "0.18.0"
+rasn-pkix = "0.18.0"
+rasn = "0.18.0"
+riff = "2.0.0"
+schemars = { version = "0.8.21", optional = true }
 serde = { version = "1.0.197", features = ["derive"] }
 serde_bytes = "0.11.5"
 serde_cbor = "0.11.1"
 serde_derive = "1.0.197"
-serde_json = { version = "1.0.114", features = ["preserve_order"] }
+serde_json = { version = "1.0.117", features = ["preserve_order"] }
 serde_with = "3.4.0"
 serde-transcode = "1.1.1"
-sha2 = "0.10.2"
+sha1 = "0.10.6"
+sha2 = "0.10.6"
 tempfile = "3.10.1"
 thiserror = "1.0.61"
 treeline = "0.1.0"
-url = "2.2.2, <2.5.1" # Can't use 2.5.1 or newer until new license is reviewed.
-uuid = { version = "1.7.0", features = ["serde", "v4", "js"] }
-x509-parser = "0.15.1"
-x509-certificate = "0.19.0"
+url = "2.5.2"
+uuid = { version = "1.10.0", features = ["serde", "v4", "js"] }
+x509-parser = "0.16.0"
+x509-certificate = "0.21.0"
 zip = { version = "0.6.6", default-features = false }
 
+
 [target.'cfg(not(target_arch = "wasm32"))'.dependencies]
 ureq = "2.4.0"
 image = { version = "0.24.7", default-features = false, features = [
-    "jpeg",
-    "png",
+    "jpeg",
+    "png",
 ], optional = true }
 instant = "0.1.12"
 openssl = { version = "0.10.61", features = ["vendored"], optional = true }
 
 [target.'cfg(target_arch = "wasm32")'.dependencies]
 console_log = { version = "1.0.0", features = ["color"] }
-ed25519-dalek = "2.1.1"
 getrandom = { version = "0.2.7", features = ["js"] }
 # We need to use the `inaccurate` flag here to ensure usage of the JavaScript Date API
 # to handle certificate timestamp checking correctly.
 instant = { version = "0.1.12", features = ["wasm-bindgen", "inaccurate"] }
 js-sys = "0.3.58"
-rsa = "0.6.1"
+rand_core = "0.9.0-alpha.2"
+rsa = { version = "0.9.6", features = ["sha2"] }
 serde-wasm-bindgen = "0.5.0"
-spki = "0.6.0"
+spki = "0.7.3"
 wasm-bindgen = "0.2.83"
 wasm-bindgen-futures = "0.4.31"
 web-sys = { version = "0.3.58", features = [
-    "Crypto",
-    "SubtleCrypto",
-    "CryptoKey",
-    "Window",
-    "WorkerGlobalScope",
+    "Crypto",
+    "SubtleCrypto",
+    "CryptoKey",
+    "Window",
+    "WorkerGlobalScope",
 ] }
 
 [dev-dependencies]
 anyhow = "1.0.40"
 mockall = "0.11.2"
 c2pa = { path = ".", features = [
-    "unstable_api",
+    "unstable_api",
 ] } # allow integration tests to use the new API
+glob = "0.3.1"
 jumbf = "0.4.0"
 
@@ -171,5 +176,4 @@ wasm-bindgen-test = "0.3.31"
 
 [target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies]
 actix = "0.13.1"
-ed25519-dalek = "2.1.1"
 tokio = { version = "1.36.0", features = ["full"] }
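The new `openssl_ffi_mutex` feature added above guards the non-re-entrant C OpenSSL library behind a process-wide lock. A minimal sketch of the general pattern (names here are illustrative, not the SDK's internals):

```rust
use std::sync::Mutex;

// A process-wide lock; every call into the (non-re-entrant) C library
// must hold it so that only one thread is inside OpenSSL at a time.
// OPENSSL_MUTEX and with_openssl are hypothetical names for illustration.
static OPENSSL_MUTEX: Mutex<()> = Mutex::new(());

fn with_openssl<T>(f: impl FnOnce() -> T) -> T {
    let _guard = OPENSSL_MUTEX.lock().expect("OpenSSL mutex poisoned");
    f() // the guard is released when it goes out of scope
}
```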
diff --git a/sdk/examples/client/client.rs b/sdk/examples/client/client.rs
index 7dba38c23..7f2b7e834 100644
--- a/sdk/examples/client/client.rs
+++ b/sdk/examples/client/client.rs
@@ -18,18 +18,18 @@ use std::path::PathBuf;
 use anyhow::Result;
 use c2pa::{
     assertions::{c2pa_action, labels, Action, Actions, CreativeWork, Exif, SchemaDotOrgPerson},
-    create_signer, Ingredient, Manifest, Reader as ManifestStore, SigningAlg,
+    create_signer, Builder, ClaimGeneratorInfo, Ingredient, Reader, Relationship, SigningAlg,
 };
 
 const GENERATOR: &str = "test_app/0.1";
 const INDENT_SPACE: usize = 2;
 
 // Example for reading the contents of a manifest store, recursively showing nested manifests
-fn show_manifest(manifest_store: &ManifestStore, manifest_label: &str, level: usize) -> Result<()> {
+fn show_manifest(reader: &Reader, manifest_label: &str, level: usize) -> Result<()> {
     let indent = " ".repeat(level * INDENT_SPACE);
 
     println!("{indent}manifest_label: {manifest_label}");
-    if let Some(manifest) = manifest_store.get_manifest(manifest_label) {
+    if let Some(manifest) = reader.get_manifest(manifest_label) {
         println!(
             "{}title: {} , format: {}, instance_id: {}",
             indent,
@@ -66,8 +66,17 @@ fn show_manifest(manifest_store: &ManifestStore, manifest_label: &str, level: us
 
         for ingredient in manifest.ingredients().iter() {
             println!("{}Ingredient title:{}", indent, ingredient.title());
+            if let Some(validation_status) = ingredient.validation_status() {
+                for status in validation_status {
+                    println!(
+                        "Ingredient validation status: {}: {}",
+                        status.code(),
+                        status.explanation().unwrap_or_default()
+                    );
+                }
+            }
             if let Some(label) = ingredient.active_manifest() {
-                show_manifest(manifest_store, label, level + 1)?;
+                show_manifest(reader, label, level + 1)?;
             }
         }
     }
@@ -88,11 +97,11 @@ pub fn main() -> Result<()> {
     let source = PathBuf::from(src);
     let dest = PathBuf::from(dst);
     // if a filepath was provided on the command line, read it as a parent file
-    let parent = Ingredient::from_file(source.as_path())?;
-
+    let mut parent = Ingredient::from_file(source.as_path())?;
+    parent.set_relationship(Relationship::ParentOf);
     // create an action assertion stating that we imported this file
     let actions = Actions::new().add_action(
-        Action::new(c2pa_action::PLACED)
+        Action::new(c2pa_action::OPENED)
             .set_parameter("identifier", parent.instance_id().to_owned())?,
     );
 
@@ -115,31 +124,38 @@ pub fn main() -> Result<()> {
     )?;
 
     // create a new Manifest
-    let mut manifest = Manifest::new(GENERATOR.to_owned());
-    // add parent and assertions
-    manifest
-        .set_parent(parent)?
-        .add_assertion(&actions)?
-        .add_assertion(&creative_work)?
-        .add_assertion(&exif)?;
+    let mut builder = Builder::new();
+    builder
+        .set_claim_generator_info(ClaimGeneratorInfo::new(GENERATOR))
+        .add_ingredient(parent)
+        .add_assertion(Actions::LABEL, &actions)?
+        .add_assertion(CreativeWork::LABEL, &creative_work)?
+        .add_assertion(Exif::LABEL, &exif)?;
 
     // sign and embed into the target file
     let signcert_path = "sdk/tests/fixtures/certs/es256.pub";
     let pkey_path = "sdk/tests/fixtures/certs/es256.pem";
     let signer = create_signer::from_files(signcert_path, pkey_path, SigningAlg::Es256, None)?;
 
-    manifest.embed(&source, &dest, &*signer)?;
+    builder.sign_file(&*signer, &source, &dest)?;
 
-    let manifest_store = ManifestStore::from_file(&dest)?;
+    let reader = Reader::from_file(&dest)?;
 
     // example of how to print out the whole manifest as json
-    println!("{manifest_store}\n");
-
-    // walk through the manifest and access data.
+    println!("{reader}\n");
 
-    if let Some(manifest_label) = manifest_store.active_label() {
-        show_manifest(&manifest_store, manifest_label, 0)?;
+    // walk through the manifests and show the contents
+    if let Some(manifest_label) = reader.active_label() {
+        show_manifest(&reader, manifest_label, 0)?;
+    }
+    if let Some(validation_status) = reader.validation_status() {
+        for status in validation_status {
+            println!(
+                "Validation status: {}: {}",
+                status.code(),
+                status.explanation().unwrap_or_default()
+            );
+        }
     }
-
     Ok(())
 }
diff --git a/sdk/examples/data_hash.rs b/sdk/examples/data_hash.rs
index 5bdcf31bc..3f2825e34 100644
--- a/sdk/examples/data_hash.rs
+++ b/sdk/examples/data_hash.rs
@@ -16,68 +16,44 @@
 
 #[cfg(not(target_arch = "wasm32"))]
 use std::{
-    io::{Read, Seek, Write},
-    path::PathBuf,
+    io::{Cursor, Read, Seek, Write},
+    path::{Path, PathBuf},
 };
 
 #[cfg(not(target_arch = "wasm32"))]
 use c2pa::{
-    assertions::{c2pa_action, Action, Actions, CreativeWork, DataHash, Exif, SchemaDotOrgPerson},
-    create_signer, hash_stream_by_alg, HashRange, Ingredient, Manifest, ManifestStore, SigningAlg,
+    assertions::{
+        c2pa_action, labels::*, Action, Actions, CreativeWork, DataHash, Exif, SchemaDotOrgPerson,
+    },
+    create_signer, hash_stream_by_alg, Builder, ClaimGeneratorInfo, HashRange, Ingredient, Reader,
+    Relationship, Result, SigningAlg,
 };
 
-fn main() {
+fn main() -> std::result::Result<(), Box<dyn std::error::Error>> {
     println!("DataHash demo");
 
     #[cfg(not(target_arch = "wasm32"))]
-    user_data_hash_with_sdk_hashing();
-
+    user_data_hash_with_sdk_hashing()?;
+    println!("Done with SDK hashing");
     #[cfg(not(target_arch = "wasm32"))]
-    user_data_hash_with_user_hashing();
+    user_data_hash_with_user_hashing()?;
+    println!("Done with user hashing");
+    Ok(())
 }
 
 #[cfg(not(target_arch = "wasm32"))]
-fn user_data_hash_with_sdk_hashing() {
-    const GENERATOR: &str = "test_app/0.1";
-
-    // You will often implement your own Signer trait to perform on device signing
-    let signcert_path = "sdk/tests/fixtures/certs/es256.pub";
-    let pkey_path = "sdk/tests/fixtures/certs/es256.pem";
-    let signer =
-        create_signer::from_files(signcert_path, pkey_path, SigningAlg::Es256, None).unwrap();
-
-    let src = "sdk/tests/fixtures/earth_apollo17.jpg";
-    let dst = "target/tmp/output.jpg";
-
-    let source = PathBuf::from(src);
-    let dest = PathBuf::from(dst);
-
-    let mut input_file = std::fs::OpenOptions::new()
-        .read(true)
-        .open(&source)
-        .unwrap();
-
-    let mut output_file = std::fs::OpenOptions::new()
-        .read(true)
-        .write(true)
-        .create(true)
-        .truncate(true)
-        .open(&dest)
-        .unwrap();
-
-    let parent = Ingredient::from_file(source.as_path()).unwrap();
-
+fn builder_from_source<S: AsRef<Path>>(source: S) -> Result<Builder> {
+    let mut parent = Ingredient::from_file(source.as_ref())?;
+    parent.set_relationship(Relationship::ParentOf);
     // create an action assertion stating that we imported this file
     let actions = Actions::new().add_action(
         Action::new(c2pa_action::PLACED)
-            .set_parameter("identifier", parent.instance_id().to_owned())
-            .unwrap(),
+            .set_parameter("identifier", parent.instance_id().to_owned())?,
     );
 
     // build a creative work assertion
-    let creative_work = CreativeWork::new()
-        .add_author(SchemaDotOrgPerson::new().set_name("me").unwrap())
-        .unwrap();
+    let creative_work =
+        CreativeWork::new().add_author(SchemaDotOrgPerson::new().set_name("me")?)?;
 
     let exif = Exif::from_json_str(
         r#"{
@@ -91,161 +67,125 @@ fn user_data_hash_with_sdk_hashing() {
         "exif:GPSAltitude": "100963/29890",
         "exif:GPSTimeStamp": "2019-09-22T18:22:57Z"
     }"#,
-    )
-    .unwrap();
-
diff --git a/sdk/examples/data_hash.rs b/sdk/examples/data_hash.rs
index 5bdcf31bc..3f2825e34 100644
--- a/sdk/examples/data_hash.rs
+++ b/sdk/examples/data_hash.rs
@@ -16,68 +16,44 @@
 #[cfg(not(target_arch = "wasm32"))]
 use std::{
-    io::{Read, Seek, Write},
-    path::PathBuf,
+    io::{Cursor, Read, Seek, Write},
+    path::{Path, PathBuf},
 };
 
 #[cfg(not(target_arch = "wasm32"))]
 use c2pa::{
-    assertions::{c2pa_action, Action, Actions, CreativeWork, DataHash, Exif, SchemaDotOrgPerson},
-    create_signer, hash_stream_by_alg, HashRange, Ingredient, Manifest, ManifestStore, SigningAlg,
+    assertions::{
+        c2pa_action, labels::*, Action, Actions, CreativeWork, DataHash, Exif, SchemaDotOrgPerson,
+    },
+    create_signer, hash_stream_by_alg, Builder, ClaimGeneratorInfo, HashRange, Ingredient, Reader,
+    Relationship, Result, SigningAlg,
 };
 
-fn main() {
+fn main() -> std::result::Result<(), Box<dyn std::error::Error>> {
     println!("DataHash demo");
 
     #[cfg(not(target_arch = "wasm32"))]
-    user_data_hash_with_sdk_hashing();
-
+    user_data_hash_with_sdk_hashing()?;
+    println!("Done with SDK hashing1");
     #[cfg(not(target_arch = "wasm32"))]
-    user_data_hash_with_user_hashing();
+    user_data_hash_with_user_hashing()?;
+    println!("Done with SDK hashing2");
+    Ok(())
 }
 
 #[cfg(not(target_arch = "wasm32"))]
-fn user_data_hash_with_sdk_hashing() {
-    const GENERATOR: &str = "test_app/0.1";
-
-    // You will often implement your own Signer trait to perform on device signing
-    let signcert_path = "sdk/tests/fixtures/certs/es256.pub";
-    let pkey_path = "sdk/tests/fixtures/certs/es256.pem";
-    let signer =
-        create_signer::from_files(signcert_path, pkey_path, SigningAlg::Es256, None).unwrap();
-
-    let src = "sdk/tests/fixtures/earth_apollo17.jpg";
-    let dst = "target/tmp/output.jpg";
-
-    let source = PathBuf::from(src);
-    let dest = PathBuf::from(dst);
-
-    let mut input_file = std::fs::OpenOptions::new()
-        .read(true)
-        .open(&source)
-        .unwrap();
-
-    let mut output_file = std::fs::OpenOptions::new()
-        .read(true)
-        .write(true)
-        .create(true)
-        .truncate(true)
-        .open(&dest)
-        .unwrap();
-
-    let parent = Ingredient::from_file(source.as_path()).unwrap();
-
+fn builder_from_source<S: AsRef<Path>>(source: S) -> Result<Builder> {
+    let mut parent = Ingredient::from_file(source.as_ref())?;
+    parent.set_relationship(Relationship::ParentOf);
     // create an action assertion stating that we imported this file
     let actions = Actions::new().add_action(
         Action::new(c2pa_action::PLACED)
-            .set_parameter("identifier", parent.instance_id().to_owned())
-            .unwrap(),
+            .set_parameter("identifier", parent.instance_id().to_owned())?,
     );
 
     // build a creative work assertion
-    let creative_work = CreativeWork::new()
-        .add_author(SchemaDotOrgPerson::new().set_name("me").unwrap())
-        .unwrap();
+    let creative_work =
+        CreativeWork::new().add_author(SchemaDotOrgPerson::new().set_name("me")?)?;
 
     let exif = Exif::from_json_str(
         r#"{
@@ -91,161 +67,125 @@ fn user_data_hash_with_sdk_hashing() {
            "exif:GPSAltitude": "100963/29890",
            "exif:GPSTimeStamp": "2019-09-22T18:22:57Z"
        }"#,
-    )
-    .unwrap();
-
-    // create a new Manifest
-    let mut manifest = Manifest::new(GENERATOR.to_owned());
-    // add parent and assertions
-    manifest
-        .set_parent(parent)
-        .unwrap()
-        .add_assertion(&actions)
-        .unwrap()
-        .add_assertion(&creative_work)
-        .unwrap()
-        .add_assertion(&exif)
-        .unwrap();
+    )?;
 
-    // get the composed manifest ready to insert into a file (returns manifest of same length as finished manifest)
-    let unfinished_manifest = manifest
-        .data_hash_placeholder(signer.reserve_size(), "jpg")
-        .unwrap();
+    let mut builder = Builder::default();
 
-    // Figure out where you want to put the manifest, let's put it at the beginning of the JPEG as first segment
-    // generate new file inserting unfinished manifest into file
-    input_file.rewind().unwrap();
-    let mut before = vec![0u8; 2];
-    input_file.read_exact(before.as_mut_slice()).unwrap();
+    let mut claim_generator = ClaimGeneratorInfo::new("test_app".to_string());
+    claim_generator.set_version("0.1");
 
-    output_file.write_all(&before).unwrap();
+    builder
+        .set_claim_generator_info(claim_generator)
+        .add_ingredient(parent)
+        .add_assertion(ACTIONS, &actions)?
+        .add_assertion_json(CREATIVE_WORK, &creative_work)?
+        .add_assertion_json(EXIF, &exif)?;
 
-    // write completed final manifest
-    output_file.write_all(&unfinished_manifest).unwrap();
+    Ok(builder)
+}
 
-    // write bytes after
-    let mut after_buf = Vec::new();
-    input_file.read_to_end(&mut after_buf).unwrap();
-    output_file.write_all(&after_buf).unwrap();
 
+#[cfg(not(target_arch = "wasm32"))]
+fn user_data_hash_with_sdk_hashing() -> Result<()> {
     // You will often implement your own Signer trait to perform on device signing
     let signcert_path = "sdk/tests/fixtures/certs/es256.pub";
     let pkey_path = "sdk/tests/fixtures/certs/es256.pem";
-    let signer =
-        create_signer::from_files(signcert_path, pkey_path, SigningAlg::Es256, None).unwrap();
+    let signer = create_signer::from_files(signcert_path, pkey_path, SigningAlg::Es256, None)?;
+
+    let src = "sdk/tests/fixtures/earth_apollo17.jpg";
+
+    let source = PathBuf::from(src);
+
+    let mut builder = builder_from_source(&source)?; // c2pa::Builder::from_manifest_definition(manifest_definition(&source)?);
+
+    let placeholder_manifest =
+        builder.data_hashed_placeholder(signer.reserve_size(), "image/jpeg")?;
+
+    let bytes = std::fs::read(&source)?;
+    let mut output: Vec<u8> = Vec::with_capacity(bytes.len() + placeholder_manifest.len());
+
+    // Generate new file inserting unfinished manifest into file.
+    // Figure out where you want to put the manifest.
+    // Here we put it at the beginning of the JPEG as first segment after the 2 byte SOI marker.
+    let manifest_pos = 2;
+    output.extend_from_slice(&bytes[0..manifest_pos]);
+    output.extend_from_slice(&placeholder_manifest);
+    output.extend_from_slice(&bytes[manifest_pos..]);
+
+    // make a stream from the output bytes
+    let mut output_stream = Cursor::new(output);
 
     // we need to add a data hash that excludes the manifest
     let mut dh = DataHash::new("my_manifest", "sha256");
-    let hr = HashRange::new(2, unfinished_manifest.len());
-    dh.add_exclusion(hr);
+    let hr = HashRange::new(manifest_pos, placeholder_manifest.len());
+    dh.add_exclusion(hr.clone());
+
+    // Hash the bytes excluding the manifest we inserted
+    let hash = hash_stream_by_alg("sha256", &mut output_stream, Some([hr].to_vec()), true)?;
+    dh.set_hash(hash);
 
     // tell SDK to fill in the hash and sign to complete the manifest
-    output_file.rewind().unwrap();
-    let final_manifest = manifest
-        .data_hash_embeddable_manifest(&dh, signer.as_ref(), "jpg", Some(&mut output_file))
-        .unwrap();
+    let final_manifest = builder.sign_data_hashed_embeddable(signer.as_ref(), &dh, "image/jpeg")?;
 
     // replace temporary manifest with final signed manifest
     // move to location where we inserted manifest,
     // note: temporary manifest and final manifest will be the same size
-    output_file.seek(std::io::SeekFrom::Start(2)).unwrap();
+    output_stream.seek(std::io::SeekFrom::Start(2))?;
     // write completed final manifest bytes over temporary bytes
-    output_file.write_all(&final_manifest).unwrap();
+    output_stream.write_all(&final_manifest)?;
 
-    // make sure the output file is correct
-    let manifest_store = ManifestStore::from_file(&dest).unwrap();
+    output_stream.rewind()?;
+    // make sure the output stream is correct
+    let reader = Reader::from_stream("image/jpeg", &mut output_stream)?;
 
     // example of how to print out the whole manifest as json
-    println!("{manifest_store}\n");
+    println!("{reader}\n");
+
+    Ok(())
 }
"exif:GPSLatitude": "39,21.102N", - "exif:GPSLongitude": "74,26.5737W", - "exif:GPSAltitudeRef": 0, - "exif:GPSAltitude": "100963/29890", - "exif:GPSTimeStamp": "2019-09-22T18:22:57Z" - }"#, - ) - .unwrap(); - - // create a new Manifest - let mut manifest = Manifest::new(GENERATOR.to_owned()); - // add parent and assertions - manifest - .set_parent(parent) - .unwrap() - .add_assertion(&actions) - .unwrap() - .add_assertion(&creative_work) - .unwrap() - .add_assertion(&exif) - .unwrap(); + .open(&dest)?; + let mut builder = builder_from_source(&source)?; // get the composed manifest ready to insert into a file (returns manifest of same length as finished manifest) - let unfinished_manifest = manifest - .data_hash_placeholder(signer.reserve_size(), "jpg") - .unwrap(); + let placeholder_manifest = + builder.data_hashed_placeholder(signer.reserve_size(), "image/jpeg")?; // Figure out where you want to put the manifest, let's put it at the beginning of the JPEG as first segment // we will need to add a data hash that excludes the manifest let mut dh = DataHash::new("my_manifest", "sha265"); - let hr = HashRange::new(2, unfinished_manifest.len()); + let hr = HashRange::new(2, placeholder_manifest.len()); dh.add_exclusion(hr); // since the only thing we are excluding in this example is the manifest we can just hash all the bytes // if you have additional exclusions you can add them to the DataHash and pass them to this function to be ' // excluded from the hash generation - let hash = hash_stream_by_alg("sha256", &mut input_file, None, true).unwrap(); + let hash = hash_stream_by_alg("sha256", &mut input_file, None, true)?; dh.set_hash(hash); - // tell SDK to fill we will provide the hash and sign to complete the manifest - let final_manifest = manifest - .data_hash_embeddable_manifest(&dh, signer.as_ref(), "jpg", None) - .unwrap(); + // tell SDK to fill in the hash and sign to complete the manifest + let final_manifest: Vec = + builder.sign_data_hashed_embeddable(signer.as_ref(), &dh, "image/jpeg")?; // generate new file inserting final manifest into file input_file.rewind().unwrap(); @@ -263,8 +203,11 @@ fn user_data_hash_with_user_hashing() { output_file.write_all(&after_buf).unwrap(); // make sure the output file is correct - let manifest_store = ManifestStore::from_file(&dest).unwrap(); + output_file.rewind()?; + let reader = Reader::from_stream("image/jpeg", output_file)?; // example of how to print out the whole manifest as json - println!("{manifest_store}\n"); + println!("{reader}\n"); + + Ok(()) } diff --git a/sdk/examples/v2api.rs b/sdk/examples/v2api.rs index 4577c5c97..98fd203d3 100644 --- a/sdk/examples/v2api.rs +++ b/sdk/examples/v2api.rs @@ -87,7 +87,7 @@ fn main() -> Result<()> { let json = manifest_def(title, format); let mut builder = Builder::from_json(&json)?; - builder.add_ingredient( + builder.add_ingredient_from_stream( json!({ "title": parent_name, "relationship": "parentOf" @@ -122,7 +122,8 @@ fn main() -> Result<()> { // unzip the manifest builder from the zipped stream zipped.rewind()?; - let ed_signer = |_context: *const (), data: &[u8]| ed_sign(data, PRIVATE_KEY); + let ed_signer = + |_context: *const (), data: &[u8]| CallbackSigner::ed25519_sign(data, PRIVATE_KEY); let signer = CallbackSigner::new(ed_signer, SigningAlg::Ed25519, CERTS); let mut builder = Builder::from_archive(&mut zipped)?; @@ -155,23 +156,6 @@ fn main() -> Result<()> { Ok(()) } -// Sign the data using the Ed25519 algorithm -fn ed_sign(data: &[u8], private_key: &[u8]) -> c2pa::Result> { - use 
diff --git a/sdk/examples/v2api.rs b/sdk/examples/v2api.rs
index 4577c5c97..98fd203d3 100644
--- a/sdk/examples/v2api.rs
+++ b/sdk/examples/v2api.rs
@@ -87,7 +87,7 @@ fn main() -> Result<()> {
     let json = manifest_def(title, format);
 
     let mut builder = Builder::from_json(&json)?;
-    builder.add_ingredient(
+    builder.add_ingredient_from_stream(
         json!({
             "title": parent_name,
             "relationship": "parentOf"
@@ -122,7 +122,8 @@ fn main() -> Result<()> {
     // unzip the manifest builder from the zipped stream
     zipped.rewind()?;
 
-    let ed_signer = |_context: *const (), data: &[u8]| ed_sign(data, PRIVATE_KEY);
+    let ed_signer =
+        |_context: *const (), data: &[u8]| CallbackSigner::ed25519_sign(data, PRIVATE_KEY);
     let signer = CallbackSigner::new(ed_signer, SigningAlg::Ed25519, CERTS);
 
     let mut builder = Builder::from_archive(&mut zipped)?;
@@ -155,23 +156,6 @@ fn main() -> Result<()> {
     Ok(())
 }
 
-// Sign the data using the Ed25519 algorithm
-fn ed_sign(data: &[u8], private_key: &[u8]) -> c2pa::Result<Vec<u8>> {
-    use ed25519_dalek::{Signature, Signer, SigningKey};
-    use pem::parse;
-
-    // Parse the PEM data to get the private key
-    let pem = parse(private_key).map_err(|e| c2pa::Error::OtherError(Box::new(e)))?;
-    // For Ed25519, the key is 32 bytes long, so we skip the first 16 bytes of the PEM data
-    let key_bytes = &pem.contents()[16..];
-    let signing_key =
-        SigningKey::try_from(key_bytes).map_err(|e| c2pa::Error::OtherError(Box::new(e)))?;
-    // Sign the data
-    let signature: Signature = signing_key.sign(data);
-
-    Ok(signature.to_bytes().to_vec())
-}
-
 // #[cfg(feature = "openssl")]
 // use openssl::{error::ErrorStack, pkey::PKey};
 // #[cfg(feature = "openssl")]
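The removed `ed_sign` helper now ships with the SDK as `CallbackSigner::ed25519_sign`. A sketch of the new setup, assuming `CERTS` and `PRIVATE_KEY` hold PEM data as in the example (the fixture paths are illustrative, not part of this PR):

```rust
use c2pa::{CallbackSigner, SigningAlg};

// Illustrative fixture paths; the example embeds its own certificates.
const CERTS: &[u8] = include_bytes!("fixtures/certs/ed25519.pub");
const PRIVATE_KEY: &[u8] = include_bytes!("fixtures/certs/ed25519.pem");

fn make_signer() -> CallbackSigner {
    // The callback receives an opaque context pointer and the bytes to sign.
    let ed_signer =
        |_context: *const (), data: &[u8]| CallbackSigner::ed25519_sign(data, PRIVATE_KEY);
    CallbackSigner::new(ed_signer, SigningAlg::Ed25519, CERTS)
}
```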
diff --git a/sdk/src/assertion.rs b/sdk/src/assertion.rs
index d37a9338c..3222a33da 100644
--- a/sdk/src/assertion.rs
+++ b/sdk/src/assertion.rs
@@ -471,14 +471,6 @@ impl Assertion {
     }
 }
 
-#[allow(dead_code)] // TODO: temp, see #498
-#[derive(Serialize, Deserialize, Debug)]
-pub(crate) struct JsonAssertionData {
-    label: String,
-    data: Value,
-    is_cbor: bool,
-}
-
 /// This error type is returned when an assertion can not be decoded.
 #[non_exhaustive]
 pub struct AssertionDecodeError {
diff --git a/sdk/src/assertions/actions.rs b/sdk/src/assertions/actions.rs
index 2377b5dc5..f873846a9 100644
--- a/sdk/src/assertions/actions.rs
+++ b/sdk/src/assertions/actions.rs
@@ -18,7 +18,7 @@ use serde_cbor::Value;
 
 use crate::{
     assertion::{Assertion, AssertionBase, AssertionCbor},
-    assertions::{labels, Actor, Metadata},
+    assertions::{labels, region_of_interest::RegionOfInterest, Actor, Metadata},
     error::Result,
     resource_store::UriOrResource,
     utils::cbor_types::DateT,
@@ -90,7 +90,7 @@ impl From<String> for SoftwareAgent {
 /// the action.
 ///
 /// See <https://c2pa.org/specifications/specifications/1.0/specs/C2PA_Specification.html#_actions>.
-#[derive(Deserialize, Serialize, Clone, Debug, Default, PartialEq, Eq)]
+#[derive(Deserialize, Serialize, Clone, Debug, Default, PartialEq)]
 pub struct Action {
     /// The label associated with this action. See ([`c2pa_action`]).
     action: String,
@@ -113,7 +113,7 @@ pub struct Action {
     /// When tracking changes and the scope of the changed components is unknown,
     /// it should be assumed that anything might have changed.
     #[serde(skip_serializing_if = "Option::is_none")]
-    changes: Option<Vec<Value>>,
+    changes: Option<Vec<RegionOfInterest>>,
 
     /// The value of the `xmpMM:InstanceID` property for the modified (output) resource.
     #[serde(rename = "instanceId", skip_serializing_if = "Option::is_none")]
@@ -190,6 +190,11 @@ impl Action {
         self.instance_id.as_deref()
     }
 
+    /// Returns the regions of interest that changed.
+    pub fn changes(&self) -> Option<&[RegionOfInterest]> {
+        self.changes.as_deref()
+    }
+
     /// Returns the additional parameters for this action.
     ///
     /// These vary by the type of action.
@@ -313,6 +318,19 @@ impl Action {
         self.reason = Some(reason.into());
         self
     }
+
+    /// Adds a region of interest that changed.
+    pub fn add_change(mut self, region_of_interest: RegionOfInterest) -> Self {
+        match &mut self.changes {
+            Some(changes) => {
+                changes.push(region_of_interest);
+            }
+            _ => {
+                self.changes = Some(vec![region_of_interest]);
+            }
+        }
+        self
+    }
 }
 
 #[derive(Deserialize, Serialize, Debug, Default, PartialEq, Eq)]
@@ -357,7 +375,7 @@ impl ActionTemplate {
 /// other information such as what software performed the action.
 ///
 /// See <https://c2pa.org/specifications/specifications/1.0/specs/C2PA_Specification.html#_actions>.
-#[derive(Deserialize, Serialize, Debug, PartialEq, Eq)]
+#[derive(Deserialize, Serialize, Debug, PartialEq)]
 #[non_exhaustive]
 pub struct Actions {
     /// A list of [`Action`]s.
@@ -491,7 +509,10 @@ pub mod tests {
     use super::*;
     use crate::{
         assertion::AssertionData,
-        assertions::metadata::{c2pa_source::GENERATOR_REE, DataSource, ReviewRating},
+        assertions::{
+            metadata::{c2pa_source::GENERATOR_REE, DataSource, ReviewRating},
+            region_of_interest::{Range, RangeType, Time, TimeType},
+        },
         hashed_uri::HashedUri,
     };
 
@@ -540,7 +561,26 @@ pub mod tests {
                     .set_parameter("name".to_owned(), "gaussian blur")
                     .unwrap()
                     .set_when("2015-06-26T16:43:23+0200")
-                    .set_source_type("digsrctype:algorithmicMedia"),
+                    .set_source_type("digsrctype:algorithmicMedia")
+                    .add_change(RegionOfInterest {
+                        region: vec![Range {
+                            range_type: RangeType::Temporal,
+                            shape: None,
+                            time: Some(Time {
+                                time_type: TimeType::Npt,
+                                start: None,
+                                end: None,
+                            }),
+                            frame: None,
+                            text: None,
+                        }],
+                        name: None,
+                        identifier: None,
+                        region_type: None,
+                        role: None,
+                        description: None,
+                        metadata: None,
+                    }),
             )
             .add_metadata(
                 Metadata::new()
@@ -552,7 +592,7 @@ pub mod tests {
         assert_eq!(original.actions.len(), 2);
         let assertion = original.to_assertion().expect("build_assertion");
         assert_eq!(assertion.mime_type(), "application/cbor");
-        assert_eq!(assertion.label(), Actions::LABEL);
+        assert_eq!(assertion.label(), format!("{}.v2", Actions::LABEL));
         let result = Actions::from_assertion(&assertion).expect("extract_assertion");
         assert_eq!(result.actions.len(), 2);
@@ -571,6 +611,7 @@ pub mod tests {
             result.actions[1].source_type().unwrap(),
             "digsrctype:algorithmicMedia"
         );
+        assert_eq!(result.actions[1].changes(), original.actions()[1].changes());
         assert_eq!(
             result.metadata.unwrap().date_time(),
             original.metadata.unwrap().date_time()
@@ -739,14 +780,6 @@ pub mod tests {
                     "region": [
                         {
                             "type": "temporal",
-                            "time": {}
-                        },
-                        {
-                            "type": "identified",
-                            "item": {
-                                "identifier": "https://bioportal.bioontology.org/ontologies/FMA",
-                                "value": "lips"
-                            }
                         }
                     ]
                 }
@@ -782,10 +815,22 @@ pub mod tests {
             &SoftwareAgent::String("TestApp".to_string())
         );
         assert_eq!(
-            result.actions[3].changes.as_deref().unwrap()[0]
-                .get("description")
-                .unwrap(),
-            "translated to klingon"
+            result.actions[3].changes().unwrap(),
+            &[RegionOfInterest {
+                description: Some("translated to klingon".to_owned()),
+                region: vec![Range {
+                    range_type: RangeType::Temporal,
+                    shape: None,
+                    time: None,
+                    frame: None,
+                    text: None
+                }],
+                name: None,
+                identifier: None,
+                region_type: None,
+                role: None,
+                metadata: None
+            }]
         );
     }
 }
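Outside the tests, the new `add_change`/`changes` pair on `Action` is used like this; a sketch that tags an edit with a temporal region (the action label and npt values are illustrative):

```rust
use c2pa::assertions::{
    c2pa_action,
    region_of_interest::{Range, RangeType, RegionOfInterest, Time, TimeType},
    Action,
};

fn edited_action() -> Action {
    let region = RegionOfInterest {
        region: vec![Range {
            range_type: RangeType::Temporal,
            shape: None,
            // Normal Play Time range; both bounds are optional.
            time: Some(Time {
                time_type: TimeType::Npt,
                start: Some("0".to_owned()),
                end: Some("30".to_owned()),
            }),
            frame: None,
            text: None,
        }],
        description: Some("color corrected".to_owned()),
        name: None,
        identifier: None,
        region_type: None,
        role: None,
        metadata: None,
    };
    Action::new(c2pa_action::EDITED).add_change(region)
}
```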
diff --git a/sdk/src/assertions/bmff_hash.rs b/sdk/src/assertions/bmff_hash.rs
index e9dca8e80..2fe26d80f 100644
--- a/sdk/src/assertions/bmff_hash.rs
+++ b/sdk/src/assertions/bmff_hash.rs
@@ -12,12 +12,10 @@
 // each license.
 
 use std::{
-    cmp,
     collections::{hash_map::Entry::Vacant, HashMap},
-    fmt, fs,
-    io::{BufReader, Cursor, Read, Seek, SeekFrom},
+    fmt,
+    io::{BufReader, Cursor, Read, Seek},
     ops::Deref,
-    path::Path,
 };
 
 use mp4::*;
@@ -40,6 +38,7 @@ use crate::{
         concat_and_hash, hash_stream_by_alg, vec_compare, verify_stream_by_alg, HashRange,
         Hasher,
     },
+    io_utils::stream_len,
     merkle::C2PAMerkleTree,
 },
 Error,
@@ -109,7 +108,7 @@ impl<'de> Visitor<'de> for VecByteBufVisitor {
     where
         V: SeqAccess<'de>,
     {
-        let len = cmp::min(visitor.size_hint().unwrap_or(0), 4096);
+        let len = std::cmp::min(visitor.size_hint().unwrap_or(0), 4096);
         let mut byte_bufs: Vec<ByteBuf> = Vec::with_capacity(len);
 
         while let Some(b) = visitor.next_element()? {
@@ -307,6 +306,10 @@ impl BmffHash {
         self.hash = Some(ByteBuf::from(hash));
     }
 
+    pub fn clear_hash(&mut self) {
+        self.hash = None;
+    }
+
     pub fn name(&self) -> Option<&String> {
         self.name.as_ref()
     }
@@ -334,7 +337,7 @@ impl BmffHash {
     /// Generate the hash value for the asset using the range from the BmffHash.
     #[cfg(feature = "file_io")]
-    pub fn gen_hash(&mut self, asset_path: &Path) -> crate::error::Result<()> {
+    pub fn gen_hash(&mut self, asset_path: &std::path::Path) -> crate::error::Result<()> {
         let mut file = std::fs::File::open(asset_path)?;
         self.hash = Some(ByteBuf::from(self.hash_from_stream(&mut file)?));
         Ok(())
@@ -381,6 +384,40 @@ impl BmffHash {
         }
     }
 
+    #[cfg(feature = "file_io")]
+    pub fn update_fragmented_inithash(
+        &mut self,
+        asset_path: &std::path::Path,
+    ) -> crate::error::Result<()> {
+        if let Some(mm) = &mut self.merkle {
+            let mut init_stream = std::fs::File::open(asset_path)?;
+            let mpd_mm = mm.get_mut(0).ok_or(Error::NotFound)?;
+
+            let curr_alg = match &mpd_mm.alg {
+                Some(a) => a.clone(),
+                None => match &self.alg {
+                    Some(a) => a.to_owned(),
+                    None => "sha256".to_string(),
+                },
+            };
+
+            let exclusions = bmff_to_jumbf_exclusions(
+                &mut init_stream,
+                &self.exclusions,
+                self.bmff_version > 1,
+            )?;
+
+            init_stream.rewind()?;
+            let hash = hash_stream_by_alg(&curr_alg, &mut init_stream, Some(exclusions), true)?;
+
+            mpd_mm.init_hash = Some(ByteBuf::from(hash));
+
+            Ok(())
+        } else {
+            Err(Error::BadParam("expected MerkleMap object".to_string()))
+        }
+    }
+
     pub fn verify_in_memory_hash(
         &self,
         data: &[u8],
@@ -440,8 +477,13 @@ impl BmffHash {
         moof_list
     }
 
-    pub fn verify_hash(&self, asset_path: &Path, alg: Option<&str>) -> crate::error::Result<()> {
-        let mut data = fs::File::open(asset_path)?;
+    #[cfg(feature = "file_io")]
+    pub fn verify_hash(
+        &self,
+        asset_path: &std::path::Path,
+        alg: Option<&str>,
+    ) -> crate::error::Result<()> {
+        let mut data = std::fs::File::open(asset_path)?;
         self.verify_stream_hash(&mut data, alg)
     }
 
@@ -722,6 +764,119 @@ impl BmffHash {
         Ok(())
     }
 
+    #[cfg(feature = "file_io")]
+    pub fn verify_stream_segments(
+        &self,
+        init_stream: &mut dyn CAIRead,
+        fragment_paths: &Vec<std::path::PathBuf>,
+        alg: Option<&str>,
+    ) -> crate::Result<()> {
+        let curr_alg = match &self.alg {
+            Some(a) => a.clone(),
+            None => match alg {
+                Some(a) => a.to_owned(),
+                None => "sha256".to_string(),
+            },
+        };
+
+        // handle file level hashing
+        if self.hash().is_some() {
+            return Err(Error::HashMismatch(
+                "Hash value should not be present for a fragmented BMFF asset".to_string(),
+            ));
+        }
+
+        // Merkle hashed BMFF
+        if let Some(mm_vec) = self.merkle() {
+            // inithash cache to prevent duplicate work.
+            let mut init_hashes = std::collections::HashSet::new();
+
+            for fp in fragment_paths {
+                let mut fragment_stream = std::fs::File::open(fp)?;
+
+                // get merkle boxes from segment
+                let c2pa_boxes = read_bmff_c2pa_boxes(&mut fragment_stream)?;
+                let bmff_merkle = c2pa_boxes.bmff_merkle;
+
+                if bmff_merkle.is_empty() {
+                    return Err(Error::HashMismatch("Fragment had no MerkleMap".to_string()));
+                }
+
+                for bmff_mm in bmff_merkle {
+                    // find matching MerkleMap for this uniqueId & localId
+                    if let Some(mm) = mm_vec.iter().find(|mm| {
+                        mm.unique_id == bmff_mm.unique_id && mm.local_id == bmff_mm.local_id
+                    }) {
+                        let alg = match &mm.alg {
+                            Some(a) => a,
+                            None => &curr_alg,
+                        };
+
+                        // check the inithash (for fragmented MP4 with multiple files this is the hash of the init_segment minus any exclusions)
+                        if let Some(init_hash) = &mm.init_hash {
+                            let bmff_exclusions = &self.exclusions;
+
+                            let init_hash_str = extfmt::Hexlify(init_hash).to_string();
+                            if !init_hashes.contains(&init_hash_str) {
+                                // convert BMFF exclusion map to flat exclusion list
+                                init_stream.rewind()?;
+                                let exclusions = bmff_to_jumbf_exclusions(
+                                    init_stream,
+                                    bmff_exclusions,
+                                    self.bmff_version > 1,
+                                )?;
+
+                                if !verify_stream_by_alg(
+                                    alg,
+                                    init_hash,
+                                    init_stream,
+                                    Some(exclusions),
+                                    true,
+                                ) {
+                                    return Err(Error::HashMismatch(
+                                        "BMFF inithash mismatch".to_string(),
+                                    ));
+                                }
+
+                                init_hashes.insert(init_hash_str);
+                            }
+
+                            // check the segments
+                            fragment_stream.rewind()?;
+                            let fragment_exclusions = bmff_to_jumbf_exclusions(
+                                &mut fragment_stream,
+                                bmff_exclusions,
+                                self.bmff_version > 1,
+                            )?;
+
+                            // hash the entire fragment minus exclusions
+                            let hash = hash_stream_by_alg(
+                                alg,
+                                &mut fragment_stream,
+                                Some(fragment_exclusions),
+                                true,
+                            )?;
+
+                            // check MerkleMap for the hash
+                            if !mm.check_merkle_tree(alg, &hash, bmff_mm.location, &bmff_mm.hashes)
+                            {
+                                return Err(Error::HashMismatch("Fragment not valid".to_string()));
+                            }
+                        }
+                    } else {
+                        return Err(Error::HashMismatch("Fragment had no MerkleMap".to_string()));
+                    }
+                }
+            }
+        } else {
+            return Err(Error::HashMismatch(
+                "Merkle value must be present for a fragmented BMFF asset".to_string(),
+            ));
+        }
+
+        Ok(())
+    }
+
+    // Used to verify fragmented BMFF assets spread across multiple files.
     pub fn verify_stream_segment(
         &self,
@@ -818,6 +973,241 @@ impl BmffHash {
         Ok(())
     }
+
+    #[cfg(feature = "file_io")]
+    pub fn add_merkle_for_fragmented(
+        &mut self,
+        alg: &str,
+        asset_path: &std::path::Path,
+        fragment_paths: &Vec<std::path::PathBuf>,
+        output_dir: &std::path::Path,
+        local_id: u32,
+        unique_id: Option<u32>,
+    ) -> crate::Result<()> {
+        let max_proofs: usize = 4; // todo: calculate (number of hashes to perform vs size of manifest) or allow to be set
+
+        if !output_dir.exists() {
+            std::fs::create_dir_all(output_dir)?;
+        } else {
+            // make sure it is a directory
+            if !output_dir.is_dir() {
+                return Err(Error::BadParam("output_dir is not a directory".to_string()));
+            }
+        }
+
+        let mut fragments = Vec::new();
+
+        let unique_id = match unique_id {
+            Some(id) => id,
+            None => local_id,
+        };
+
+        // copy to output folder saving paths to fragments and init segments
+        for file_path in fragment_paths {
+            fragments.push(file_path.as_path());
+
+            let output_path = output_dir.join(
+                file_path
+                    .file_name()
+                    .ok_or(Error::BadParam("file name not found".to_string()))?,
+            );
+            std::fs::copy(file_path, output_path)?;
+        }
+        let output_path = output_dir.join(
+            asset_path
+                .file_name()
+                .ok_or(Error::BadParam("file name not found".to_string()))?,
+        );
+        std::fs::copy(asset_path, output_path)?;
+
+        // create dummy tree to figure out the layout and proof size
+        let dummy_tree = C2PAMerkleTree::dummy_tree(fragments.len(), alg);
+
+        let mut location_to_fragment_map: HashMap<u32, std::path::PathBuf> = HashMap::new();
+
+        // copy to destination and insert placeholder C2PA Merkle box
+        for (location, seg) in (0_u32..).zip(fragments.iter()) {
+            let mut seg_reader = std::fs::File::open(seg)?;
+
+            let c2pa_boxes = read_bmff_c2pa_boxes(&mut seg_reader)?;
+            let box_infos = &c2pa_boxes.box_infos;
+
+            if box_infos.iter().filter(|b| b.path == "moof").count() != 1 {
+                return Err(Error::BadParam("expected 1 moof in fragment".to_string()));
+            }
+
+            if box_infos.iter().filter(|b| b.path == "mdat").count() != 1 {
+                return Err(Error::BadParam("expected 1 mdat in fragment".to_string()));
+            }
+
+            // we don't currently support adding to fragments with existing manifests
+            if !c2pa_boxes.bmff_merkle.is_empty() {
+                return Err(Error::BadParam(
+                    "fragment already contains BmffMerkleMap".to_string(),
+                ));
+            }
+
+            let mut mm = BmffMerkleMap {
+                unique_id,
+                local_id,
+                location,
+                hashes: None,
+            };
+
+            let proof = dummy_tree.get_proof_by_index(location as usize, max_proofs)?;
+            if !proof.is_empty() {
+                let mut proof_vec = Vec::new();
+                for v in proof {
+                    let bb = ByteBuf::from(v);
+                    proof_vec.push(bb);
+                }
+                mm.hashes = Some(VecByteBuf(proof_vec));
+            }
+
+            let mm_cbor = serde_cbor::to_vec(&mm).map_err(|_err| Error::AssertionEncoding)?;
+
+            // generate the UUID box
+            let mut uuid_box_data: Vec<u8> = Vec::with_capacity(mm_cbor.len() * 2);
+            crate::asset_handlers::bmff_io::write_c2pa_box(
+                &mut uuid_box_data,
+                &[],
+                false,
+                &mm_cbor,
+            )?;
+
+            let first_moof = box_infos
+                .iter()
+                .find(|b| b.path == "moof")
+                .ok_or(Error::BadParam("expected 1 moof in fragment".to_string()))?;
+
+            let mut source = std::fs::File::open(seg)?;
+            let output_filename = seg
+                .file_name()
+                .ok_or(Error::NotFound)?
+                .to_string_lossy()
+                .into_owned();
+            let dest_path = output_dir.join(&output_filename);
+            let mut dest = std::fs::OpenOptions::new().write(true).open(&dest_path)?;
+
+            // UUID to insert into output asset
+            crate::utils::io_utils::insert_data_at(
+                &mut source,
+                &mut dest,
+                first_moof.offset,
+                &uuid_box_data,
+            )?;
+
+            // save file path for each location in the Merkle tree
+            location_to_fragment_map.insert(location, dest_path);
+        }
+
+        // fill in actual hashes now that we have inserted the C2PA box.
+        let bmff_exclusions = &self.exclusions;
+        let mut leaves: Vec<crate::utils::merkle::MerkleNode> = Vec::with_capacity(fragments.len());
+        for i in 0..fragments.len() as u32 {
+            if let Some(path) = location_to_fragment_map.get(&i) {
+                let mut fragment_stream = std::fs::File::open(path)?;
+
+                let fragment_exclusions = bmff_to_jumbf_exclusions(
+                    &mut fragment_stream,
+                    bmff_exclusions,
+                    self.bmff_version > 1,
+                )?;
+
+                // hash the entire fragment minus fragment exclusions
+                let hash =
+                    hash_stream_by_alg(alg, &mut fragment_stream, Some(fragment_exclusions), true)?;
+
+                // add merkle leaf
+                leaves.push(crate::utils::merkle::MerkleNode(hash));
+            }
+        }
+
+        // gen final merkle tree
+        let m_tree = C2PAMerkleTree::from_leaves(leaves, alg, false);
+        for i in 0..fragments.len() as u32 {
+            if let Some(dest_path) = location_to_fragment_map.get(&i) {
+                let mut fragment_stream = std::fs::OpenOptions::new()
+                    .read(true)
+                    .write(true)
+                    .open(dest_path)?;
+
+                let c2pa_boxes = read_bmff_c2pa_boxes(&mut fragment_stream)?;
+                let merkle_box_infos = &c2pa_boxes.bmff_merkle_box_infos;
+                let merkle_boxes = &c2pa_boxes.bmff_merkle;
+
+                if merkle_boxes.len() != 1 || merkle_box_infos.len() != 1 {
+                    return Err(Error::InvalidAsset(
+                        "mp4 fragment Merkle box count wrong".to_string(),
+                    ));
+                }
+
+                let mut bmff_mm = merkle_boxes[0].clone();
+                let bmff_mm_info = &merkle_box_infos[0];
+
+                // get proof for this location and replace temp proof
+                let proof = m_tree.get_proof_by_index(bmff_mm.location as usize, max_proofs)?;
+                if !proof.is_empty() {
+                    let mut proof_vec = Vec::new();
+                    for v in proof {
+                        let bb = ByteBuf::from(v);
+                        proof_vec.push(bb);
+                    }
+
+                    bmff_mm.hashes = Some(VecByteBuf(proof_vec));
+                }
+
+                let mm_cbor =
+                    serde_cbor::to_vec(&bmff_mm).map_err(|_err| Error::AssertionEncoding)?;
+
+                // generate the C2PA Merkle box with final hash
+                let mut uuid_box_data: Vec<u8> = Vec::with_capacity(mm_cbor.len() * 2);
+                crate::asset_handlers::bmff_io::write_c2pa_box(
+                    &mut uuid_box_data,
+                    &[],
+                    false,
+                    &mm_cbor,
+                )?;
+
+                // replace temp C2PA Merkle box
+                if uuid_box_data.len() == bmff_mm_info.size as usize {
+                    fragment_stream.seek(std::io::SeekFrom::Start(bmff_mm_info.offset))?;
+                    std::io::Write::write_all(&mut fragment_stream, &uuid_box_data)?;
+                } else {
+                    return Err(Error::InvalidAsset(
+                        "mp4 fragment Merkle box size does not match".to_string(),
+                    ));
+                }
+            }
+        }
+
+        // save desired Merkle tree row (for now complete tree)
+        let tree_row = std::cmp::min(max_proofs, m_tree.layers.len() - 1);
+        let merkle_row = m_tree.layers[tree_row].clone();
+        let mut hashes = Vec::new();
+        for mn in merkle_row {
+            let bb = ByteBuf::from(mn.0);
+            hashes.push(bb);
+        }
+
+        let mm = MerkleMap {
+            unique_id,
+            local_id,
+            count: fragments.len() as u32,
+            alg: Some(alg.to_owned()),
+            init_hash: match alg {
+                // placeholder init hash to be filled once manifest is inserted
+                "sha256" => Some(ByteBuf::from([0u8; 32].to_vec())),
+                "sha384" => Some(ByteBuf::from([0u8; 48].to_vec())),
+                "sha512" => Some(ByteBuf::from([0u8; 64].to_vec())),
+                _ => return Err(Error::UnsupportedType),
+            },
+            hashes: VecByteBuf(hashes),
+        };
+        self.merkle = Some(vec![mm]);
+
+        Ok(())
+    }
 }
 
 impl AssertionCbor for BmffHash {}
@@ -856,17 +1246,6 @@ fn stsc_index(track: &Mp4Track, sample_id: u32) -> crate::Result<usize> {
     Ok(track.trak.mdia.minf.stbl.stsc.entries.len() - 1)
 }
 
-fn stream_len(reader: &mut dyn CAIRead) -> crate::Result<u64> {
-    let old_pos = reader.stream_position()?;
-    let len = reader.seek(SeekFrom::End(0))?;
-
-    if old_pos != len {
-        reader.seek(SeekFrom::Start(old_pos))?;
-    }
-
-    Ok(len)
-}
-
 /* we need shippable examples
 #[cfg(test)]
 pub mod tests {
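How the new fragmented-BMFF pieces compose (all behind the `file_io` feature): `add_merkle_for_fragmented` copies the asset and writes placeholder Merkle boxes, `update_fragmented_inithash` refreshes the init-segment hash after the manifest is embedded, and `verify_stream_segments` replays the checks. A rough sketch inferred only from the signatures above; the paths are placeholders and `BmffHash` construction is elided:

```rust
use std::path::PathBuf;

use c2pa::assertions::BmffHash;

// Sketch only: assumes `bmff_hash` already carries the exclusion ranges and
// that init.mp4 / seg_*.m4s form a fragmented MP4 asset.
fn merkle_fragments(bmff_hash: &mut BmffHash) -> c2pa::Result<()> {
    let init = PathBuf::from("init.mp4");
    let fragments = vec![PathBuf::from("seg_1.m4s"), PathBuf::from("seg_2.m4s")];
    let output_dir = PathBuf::from("target/fragmented");

    // Copies the asset into output_dir, inserting a placeholder C2PA Merkle
    // box into each fragment and recording the tree in the assertion.
    bmff_hash.add_merkle_for_fragmented("sha256", &init, &fragments, &output_dir, 1, None)?;

    // After the manifest is embedded in the init segment, fix up its hash...
    bmff_hash.update_fragmented_inithash(&output_dir.join("init.mp4"))?;

    // ...and a validator can then check every fragment against the tree.
    let mut init_stream = std::fs::File::open(output_dir.join("init.mp4"))?;
    let out_fragments: Vec<PathBuf> = fragments
        .iter()
        .filter_map(|f| f.file_name().map(|n| output_dir.join(n)))
        .collect();
    bmff_hash.verify_stream_segments(&mut init_stream, &out_fragments, Some("sha256"))
}
```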
diff --git a/sdk/src/assertions/data_hash.rs b/sdk/src/assertions/data_hash.rs
index 4d75ceff8..8f4737613 100644
--- a/sdk/src/assertions/data_hash.rs
+++ b/sdk/src/assertions/data_hash.rs
@@ -219,11 +219,17 @@ impl DataHash {
             return Err(Error::BadParam("asset hash is remote".to_owned()));
         }
 
-        let curr_alg = alg.unwrap_or("sha256");
+        let curr_alg = match &self.alg {
+            Some(a) => a.clone(),
+            None => match alg {
+                Some(a) => a.to_owned(),
+                None => return Err(Error::HashMismatch("no alg specified".to_owned())),
+            },
+        };
 
         let exclusions = self.exclusions.as_ref().cloned();
 
-        if verify_asset_by_alg(curr_alg, &self.hash, asset_path, exclusions) {
+        if verify_asset_by_alg(&curr_alg, &self.hash, asset_path, exclusions) {
             Ok(())
         } else {
             Err(Error::HashMismatch("Hashes do not match".to_owned()))
@@ -240,7 +246,7 @@ impl DataHash {
             Some(a) => a.clone(),
             None => match alg {
                 Some(a) => a.to_owned(),
-                None => "sha256".to_string(),
+                None => return Err(Error::HashMismatch("no alg specified".to_owned())),
             },
         };
diff --git a/sdk/src/assertions/exif.rs b/sdk/src/assertions/exif.rs
index fe2eca008..c406de81a 100644
--- a/sdk/src/assertions/exif.rs
+++ b/sdk/src/assertions/exif.rs
@@ -36,6 +36,9 @@ pub struct Exif {
 }
 
 impl Exif {
+    // A label for our assertion, use reverse domain name syntax
+    pub const LABEL: &'static str = labels::EXIF;
+
     pub fn new() -> Self {
         Self {
             object_context: Some(json!({
diff --git a/sdk/src/assertions/metadata.rs b/sdk/src/assertions/metadata.rs
index a626c0631..ef35a2a5e 100644
--- a/sdk/src/assertions/metadata.rs
+++ b/sdk/src/assertions/metadata.rs
@@ -21,7 +21,7 @@ use serde_json::Value;
 
 use crate::{
     assertion::{Assertion, AssertionBase, AssertionCbor},
-    assertions::labels,
+    assertions::{labels, region_of_interest::RegionOfInterest},
     error::Result,
     hashed_uri::HashedUri,
     utils::cbor_types::DateT,
@@ -30,7 +30,7 @@ use crate::{
 const ASSERTION_CREATION_VERSION: usize = 1;
 
 /// The Metadata structure can be used as part of other assertions or on its own to reference others
-#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
 #[cfg_attr(feature = "json_schema", derive(JsonSchema))]
 pub struct Metadata {
     #[serde(rename = "reviewRatings", skip_serializing_if = "Option::is_none")]
@@ -41,6 +41,8 @@ pub struct Metadata {
     reference: Option<HashedUri>,
     #[serde(rename = "dataSource", skip_serializing_if = "Option::is_none")]
     data_source: Option<DataSource>,
+    #[serde(rename = "regionOfInterest", skip_serializing_if = "Option::is_none")]
+    region_of_interest: Option<RegionOfInterest>,
     #[serde(flatten)]
     other: HashMap<String, Value>,
 }
@@ -59,6 +61,7 @@ impl Metadata {
         )),
         reference: None,
         data_source: None,
+        region_of_interest: None,
         other: HashMap::new(),
     }
 }
@@ -78,6 +81,11 @@ impl Metadata {
         self.data_source.as_ref()
     }
 
+    /// Returns the [`RegionOfInterest`] for this assertion if it exists.
+    pub fn region_of_interest(&self) -> Option<&RegionOfInterest> {
+        self.region_of_interest.as_ref()
+    }
+
     /// Returns map containing custom metadata fields.
     pub fn other(&self) -> &HashMap<String, Value> {
         &self.other
@@ -119,6 +127,12 @@ impl Metadata {
         self
     }
 
+    /// Sets the region of interest.
+    pub fn set_region_of_interest(mut self, region_of_interest: RegionOfInterest) -> Self {
+        self.region_of_interest = Some(region_of_interest);
+        self
+    }
+
     /// Adds an additional key / value pair.
     pub fn insert(&mut self, key: &str, value: Value) -> &mut Self {
         self.other.insert(key.to_string(), value);
@@ -290,6 +304,7 @@ pub struct DataBox {
     pub format: String,
     #[serde(with = "serde_bytes")]
     pub data: Vec<u8>,
+    #[serde(skip_serializing_if = "Option::is_none")]
     pub data_types: Option<Vec<AssetType>>,
 }
 
@@ -299,12 +314,34 @@ pub mod tests {
     #![allow(clippy::unwrap_used)]
 
     use super::*;
+    use crate::assertions::region_of_interest::{Range, RangeType, Time, TimeType};
 
     #[test]
     fn assertion_metadata() {
         let review = ReviewRating::new("foo", Some("bar".to_owned()), 3);
         let test_value = Value::from("test");
-        let mut original = Metadata::new().add_review(review);
+        let mut original =
+            Metadata::new()
+                .add_review(review)
+                .set_region_of_interest(RegionOfInterest {
+                    region: vec![Range {
+                        range_type: RangeType::Temporal,
+                        shape: None,
+                        time: Some(Time {
+                            time_type: TimeType::Npt,
+                            start: None,
+                            end: None,
+                        }),
+                        frame: None,
+                        text: None,
+                    }],
+                    name: None,
+                    identifier: None,
+                    region_type: None,
+                    role: None,
+                    description: None,
+                    metadata: None,
+                });
         original.insert("foo", test_value);
         println!("{:?}", &original);
         let assertion = original.to_assertion().expect("build_assertion");
@@ -315,6 +352,10 @@ pub mod tests {
         assert_eq!(original.date_time, result.date_time);
         assert_eq!(original.reviews, result.reviews);
         assert_eq!(original.get("foo").unwrap(), "test");
+        assert_eq!(
+            original.region_of_interest.as_ref(),
+            result.region_of_interest()
+        )
         //assert_eq!(original.reviews.unwrap().len(), 1);
     }
 }
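The same setter works outside the test module; a small sketch attaching a region to a standalone metadata assertion (the review values are illustrative, and `region` can be built as in the actions example above):

```rust
use c2pa::assertions::{region_of_interest::RegionOfInterest, Metadata, ReviewRating};

fn reviewed_metadata(region: RegionOfInterest) -> Metadata {
    Metadata::new()
        .add_review(ReviewRating::new("looks good", None, 5))
        .set_region_of_interest(region)
}
```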
diff --git a/sdk/src/assertions/mod.rs b/sdk/src/assertions/mod.rs
index dae13620e..67865ae53 100644
--- a/sdk/src/assertions/mod.rs
+++ b/sdk/src/assertions/mod.rs
@@ -14,7 +14,7 @@
 //! Assertion helpers to build, validate, and parse assertions.
 
 mod actions;
-pub use actions::{c2pa_action, Action, Actions, SoftwareAgent};
+pub use actions::{c2pa_action, Action, ActionTemplate, Actions, SoftwareAgent};
 
 mod bmff_hash;
 pub use bmff_hash::{BmffHash, BmffMerkleMap, DataMap, ExclusionsMap, SubsetMap};
@@ -33,7 +33,8 @@ pub use exif::Exif;
 
 #[allow(dead_code)] // will become public later
 mod ingredient;
-pub(crate) use ingredient::{Ingredient, Relationship};
+pub(crate) use ingredient::Ingredient;
+pub use ingredient::Relationship;
 
 pub mod labels;
 
@@ -57,3 +58,5 @@ pub(crate) use user_cbor::UserCbor;
 mod uuid_assertion;
 #[allow(unused_imports)]
 pub(crate) use uuid_assertion::Uuid;
+
+pub mod region_of_interest;
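With these exports, `Relationship` (previously crate-internal), `ActionTemplate`, and the whole `region_of_interest` module become part of the public API; downstream code can now write, for example:

```rust
use c2pa::Relationship;

// Relationship is also reachable as c2pa::assertions::Relationship.
fn is_parent(rel: &Relationship) -> bool {
    matches!(rel, Relationship::ParentOf)
}
```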
diff --git a/sdk/src/assertions/region_of_interest.rs b/sdk/src/assertions/region_of_interest.rs
new file mode 100644
index 000000000..349299152
--- /dev/null
+++ b/sdk/src/assertions/region_of_interest.rs
@@ -0,0 +1,247 @@
+//! A set of structs to define a region of interest within an
+//! [`Action`][crate::assertions::Action] or [`Metadata`].
+
+#[cfg(feature = "json_schema")]
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+use serde_with::skip_serializing_none;
+
+use super::Metadata;
+
+/// An x, y coordinate used for specifying vertices in polygons.
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[cfg_attr(feature = "json_schema", derive(JsonSchema))]
+pub struct Coordinate {
+    /// The coordinate along the x-axis.
+    pub x: f64,
+    /// The coordinate along the y-axis.
+    pub y: f64,
+}
+
+/// The type of shape for the range.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[cfg_attr(feature = "json_schema", derive(JsonSchema))]
+#[serde(rename_all = "camelCase")]
+pub enum ShapeType {
+    /// A rectangle.
+    Rectangle,
+    /// A circle.
+    Circle,
+    /// A polygon.
+    Polygon,
+}
+
+/// The type of unit for the range.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[cfg_attr(feature = "json_schema", derive(JsonSchema))]
+#[serde(rename_all = "camelCase")]
+pub enum UnitType {
+    /// Use pixels.
+    Pixel,
+    /// Use percentage.
+    Percent,
+}
+
+/// A spatial range representing a rectangle, circle, or polygon.
+#[skip_serializing_none]
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[cfg_attr(feature = "json_schema", derive(JsonSchema))]
+pub struct Shape {
+    /// The type of shape.
+    #[serde(rename = "type")]
+    pub shape_type: ShapeType,
+    /// The type of unit for the shape range.
+    pub unit: UnitType,
+    /// The origin of the coordinate in the shape.
+    pub origin: Coordinate,
+    /// The width for rectangles or diameter for circles.
+    ///
+    /// This field can be ignored for polygons.
+    pub width: Option<f64>,
+    /// The height of a rectangle.
+    ///
+    /// This field can be ignored for circles and polygons.
+    pub height: Option<f64>,
+    /// If the range is inside the shape.
+    ///
+    /// The default value is true.
+    pub inside: Option<bool>,
+    /// The vertices of the polygon.
+    ///
+    /// This field can be ignored for rectangles and circles.
+    pub vertices: Option<Vec<Coordinate>>,
+}
+
+/// The type of time.
+#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[cfg_attr(feature = "json_schema", derive(JsonSchema))]
+#[serde(rename_all = "camelCase")]
+pub enum TimeType {
+    /// Times are described using Normal Play Time (npt) as described in RFC 2326.
+    #[default]
+    Npt,
+}
+
+/// A temporal range representing a starting time to an ending time.
+#[skip_serializing_none]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[cfg_attr(feature = "json_schema", derive(JsonSchema))]
+pub struct Time {
+    /// The type of time.
+    #[serde(rename = "type", default)]
+    pub time_type: TimeType,
+    /// The start time or the start of the asset if not present.
+    pub start: Option<String>,
+    /// The end time or the end of the asset if not present.
+    pub end: Option<String>,
+}
+
+/// A frame range representing starting and ending frames or pages.
+///
+/// If both `start` and `end` are missing, the frame will span the entire asset.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[cfg_attr(feature = "json_schema", derive(JsonSchema))]
+pub struct Frame {
+    /// The start of the frame or the start of the asset if not present.
+    ///
+    /// The first frame/page starts at 0.
+    pub start: Option<i32>,
+    /// The end of the frame inclusive or the end of the asset if not present.
+    pub end: Option<i32>,
+}
+
+/// Selects a range of text via a fragment identifier.
+///
+/// This is modeled after the W3C Web Annotation selector model.
+#[skip_serializing_none]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[cfg_attr(feature = "json_schema", derive(JsonSchema))]
+pub struct TextSelector {
+    // TODO: can we provide more specific types?
+    //
+    /// Fragment identifier as per RFC3023 (XML) or ISO 32000-2 (PDF), Annex O.
+    pub fragment: String,
+    /// The start character offset or the start of the fragment if not present.
+    pub start: Option<i32>,
+    /// The end character offset or the end of the fragment if not present.
+    pub end: Option<i32>,
+}
+
+/// One or two [`TextSelector`][TextSelector] identifying the range to select.
+#[skip_serializing_none]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[cfg_attr(feature = "json_schema", derive(JsonSchema))]
+pub struct TextSelectorRange {
+    /// The start (or entire) text range.
+    pub selector: TextSelector,
+    /// The end of the text range.
+    pub end: Option<TextSelector>,
+}
+
+/// A textual range representing multiple (possibly discontinuous) ranges of text.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[cfg_attr(feature = "json_schema", derive(JsonSchema))]
+pub struct Text {
+    /// The ranges of text to select.
+    pub selectors: Vec<TextSelectorRange>,
+}
+
+/// The type of range for the region of interest.
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[cfg_attr(feature = "json_schema", derive(JsonSchema))]
+#[serde(rename_all = "camelCase")]
+pub enum RangeType {
+    /// A spatial range, see [`Shape`] for more details.
+    Spatial,
+    /// A temporal range, see [`Time`] for more details.
+    Temporal,
+    /// A frame range, see [`Frame`] for more details.
+    Frame,
+    /// A textual range, see [`Text`] for more details.
+    Textual,
+}
+
+// TODO: this can be much more idiomatic with an enum, but then it wouldn't line up with spec
+//
+/// A spatial, temporal, frame, or textual range describing the region of interest.
+#[skip_serializing_none]
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[cfg_attr(feature = "json_schema", derive(JsonSchema))]
+pub struct Range {
+    /// The type of range of interest.
+    #[serde(rename = "type")]
+    pub range_type: RangeType,
+    /// A spatial range.
+    pub shape: Option<Shape>,
+    /// A temporal range.
+    pub time: Option<Time>