Skip to content

Commit

Permalink
chore: Fix dependabot issues:
Browse files Browse the repository at this point in the history
Issue: #591
Issue: #576
Issue: #444
Issue: #443
Issue: #301

Remove obsolete multihash support
  • Loading branch information
mauricefisher64 committed Sep 24, 2024
1 parent ea0ae2c commit 23c0db5
Show file tree
Hide file tree
Showing 9 changed files with 56 additions and 120 deletions.
14 changes: 7 additions & 7 deletions sdk/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -94,24 +94,23 @@ ed25519-dalek = "2.1.1"
fast-xml = "0.23.1"
hex = "0.4.3"
# Version 1.13.0 doesn't compile under Rust < 1.75, pinning to 1.12.0
id3 = "=1.12.0"
id3 = "=1.14.0"
img-parts = "0.3.0"
jfifdump = "0.5.1"
log = "0.4.8"
lopdf = { version = "0.31.0", optional = true }
lazy_static = "1.4.0"
memchr = "2.7.1"
multibase = "0.9.0"
multihash = "0.11.4"
mp4 = "0.13.0"
pem = "3.0.2"
png_pong = "0.9.1"
rand = "0.8.5"
rand_chacha = "0.3.1"
range-set = "0.0.11"
rasn-ocsp = "0.12.5"
rasn-pkix = "0.12.5"
rasn = "0.12.5"
rasn-ocsp = "0.17.2"
rasn-pkix = "0.17.2"
rasn = "0.17.2"
riff = "2.0.0"
schemars = { version = "0.8.21", optional = true }
serde = { version = "1.0.197", features = ["derive"] }
Expand All @@ -121,13 +120,14 @@ serde_derive = "1.0.197"
serde_json = { version = "1.0.117", features = ["preserve_order"] }
serde_with = "3.4.0"
serde-transcode = "1.1.1"
sha1 = "0.10.6"
sha2 = "0.10.6"
tempfile = "3.10.1"
thiserror = "1.0.61"
treeline = "0.1.0"
url = "2.5.2"
uuid = { version = "1.7.0", features = ["serde", "v4", "js"] }
x509-parser = "0.15.1"
x509-parser = "0.16.0"
x509-certificate = "0.21.0"
zip = { version = "0.6.6", default-features = false }

Expand All @@ -151,7 +151,7 @@ js-sys = "0.3.58"
rand_core = "0.9.0-alpha.2"
rsa = { version = "0.9.6", features = ["sha2"] }
serde-wasm-bindgen = "0.5.0"
spki = "0.6.0"
spki = "0.7.3"
wasm-bindgen = "0.2.83"
wasm-bindgen-futures = "0.4.31"
web-sys = { version = "0.3.58", features = [
Expand Down
13 changes: 6 additions & 7 deletions sdk/src/asset_handlers/mp3_io.rs
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ fn get_manifest_pos(input_stream: &mut dyn CAIRead) -> Option<(u64, u32)> {
reader: input_stream,
};

if let Ok(tag) = Tag::read_from(reader) {
if let Ok(tag) = Tag::read_from2(reader) {
let mut manifests = Vec::new();

for eo in tag.encapsulated_objects() {
Expand Down Expand Up @@ -133,7 +133,7 @@ impl CAIReader for Mp3IO {

let mut manifest: Option<Vec<u8>> = None;

if let Ok(tag) = Tag::read_from(input_stream) {
if let Ok(tag) = Tag::read_from2(input_stream) {
for eo in tag.encapsulated_objects() {
if eo.mime_type == GEOB_FRAME_MIME_TYPE {
match manifest {
Expand All @@ -152,7 +152,7 @@ impl CAIReader for Mp3IO {
fn read_xmp(&self, input_stream: &mut dyn CAIRead) -> Option<String> {
input_stream.rewind().ok()?;

if let Ok(tag) = Tag::read_from(input_stream) {
if let Ok(tag) = Tag::read_from2(input_stream) {
for frame in tag.frames() {
if let Content::Private(private) = frame.content() {
if &private.owner_identifier == "XMP" {
Expand Down Expand Up @@ -198,7 +198,7 @@ impl RemoteRefEmbed for Mp3IO {
let reader = CAIReadWrapper {
reader: source_stream,
};
if let Ok(tag) = Tag::read_from(reader) {
if let Ok(tag) = Tag::read_from2(reader) {
for f in tag.frames() {
match f.content() {
Content::Private(private) => {
Expand All @@ -222,8 +222,7 @@ impl RemoteRefEmbed for Mp3IO {
let frame = Frame::with_content(
"PRIV",
Content::Private(Private {
// Null-terminated
owner_identifier: "XMP\0".to_owned(),
owner_identifier: "XMP".to_owned(),
private_data: xmp.into_bytes(),
}),
);
Expand Down Expand Up @@ -358,7 +357,7 @@ impl CAIWriter for Mp3IO {
reader: input_stream,
};

if let Ok(tag) = Tag::read_from(reader) {
if let Ok(tag) = Tag::read_from2(reader) {
for f in tag.frames() {
match f.content() {
// remove existing manifest keeping existing frames
Expand Down
9 changes: 5 additions & 4 deletions sdk/src/cose_validator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -212,15 +212,15 @@ pub(crate) fn check_cert(

let (_i, (ha_alg, mgf_ai)) = seq
.parse(|i| {
let (i, h) = Header::from_der(i)?;
let (i, h) = <Header as asn1_rs::FromDer>::from_der(i)?;
if h.class() != Class::ContextSpecific || h.tag() != Tag(0) {
return Err(nom::Err::Error(asn1_rs::Error::BerValueError));
}

let (i, ha_alg) = AlgorithmIdentifier::from_der(i)
.map_err(|_| nom::Err::Error(asn1_rs::Error::BerValueError))?;

let (i, h) = Header::from_der(i)?;
let (i, h) = <Header as asn1_rs::FromDer>::from_der(i)?;
if h.class() != Class::ContextSpecific || h.tag() != Tag(1) {
return Err(nom::Err::Error(asn1_rs::Error::BerValueError));
}
Expand All @@ -240,14 +240,15 @@ pub(crate) fn check_cert(
.map_err(|_| Error::CoseInvalidCert)?;

let (_i, mgf_ai_params_algorithm) =
Any::from_der(&mgf_ai_parameters.content).map_err(|_| Error::CoseInvalidCert)?;
<Any as asn1_rs::FromDer>::from_der(&mgf_ai_parameters.content)
.map_err(|_| Error::CoseInvalidCert)?;

let mgf_ai_params_algorithm = mgf_ai_params_algorithm
.as_oid()
.map_err(|_| Error::CoseInvalidCert)?;

// must be the same
if ha_alg.algorithm != mgf_ai_params_algorithm {
if ha_alg.algorithm.to_id_string() != mgf_ai_params_algorithm.to_id_string() {
let log_item = log_item!(
"Cose_Sign1",
"certificate algorithm error",
Expand Down
2 changes: 1 addition & 1 deletion sdk/src/ocsp_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ pub(crate) fn fetch_ocsp_response(certs: &[Vec<u8>]) -> Option<Vec<u8>> {
let request_list = vec![ocsp_req];

let tbs_request = rasn_ocsp::TbsRequest {
version: rasn_ocsp::Version::parse_bytes(b"0", 16)?,
version: rasn_ocsp::Version::from(0u8),
requestor_name: None,
request_list,
request_extensions: None,
Expand Down
2 changes: 1 addition & 1 deletion sdk/src/store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4906,7 +4906,7 @@ pub mod tests {

#[test]
fn test_display() {
let ap = fixture_path("CA.jpg");
let ap = fixture_path("2error.jpg");
let mut report = DetailedStatusTracker::new();
let store = Store::load_from_asset(&ap, true, &mut report).expect("load_from_asset");
let _errors = report_split_errors(report.get_log_mut());
Expand Down
4 changes: 2 additions & 2 deletions sdk/src/trust_handler.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ pub(crate) static DOCUMENT_SIGNING_OID: Oid<'static> = oid!(1.3.6 .1 .5 .5 .7 .3
// Trait for supplying configuration and handling of trust lists and EKU configuration store
//
// `RefUnwindSafe` + `UnwindSafe` were added to ensure `Store` is unwind safe and to preserve
// backwards compatbility.
// backwards compatibility.
pub(crate) trait TrustHandlerConfig: RefUnwindSafe + UnwindSafe + Sync + Send {
fn new() -> Self
where
Expand Down Expand Up @@ -88,7 +88,7 @@ pub(crate) fn has_allowed_oid<'a>(
let mut last_oid = None;
if eku.other.iter().any(|v| {
allowed_ekus.iter().any(|oid| {
if oid == v {
if oid.to_id_string() == v.to_id_string() {
last_oid = Some(oid);
true
} else {
Expand Down
93 changes: 11 additions & 82 deletions sdk/src/utils/hash_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,6 @@ use std::{

//use conv::ValueFrom;
use log::warn;
// multihash versions
use multibase::{decode, encode};
use multihash::{wrap, Code, Multihash, Sha1, Sha2_256, Sha2_512, Sha3_256, Sha3_384, Sha3_512};
use range_set::RangeSet;
use serde::{Deserialize, Serialize};
// direct sha functions
Expand Down Expand Up @@ -89,21 +86,6 @@ pub fn vec_compare(va: &[u8], vb: &[u8]) -> bool {
.all(|(a,b)| a == b)
}

/// Generate hash of type hash_type for supplied data array. The
/// hash_type values are those specified in the multihash specification. Currently
/// we only support Sha2-256/512 or Sha3-256/384/512.
/// Returns hash or None if incompatible type
pub fn hash_by_type(hash_type: u8, data: &[u8]) -> Option<Multihash> {
match hash_type {
0x12 => Some(Sha2_256::digest(data)),
0x13 => Some(Sha2_512::digest(data)),
0x14 => Some(Sha3_512::digest(data)),
0x15 => Some(Sha3_384::digest(data)),
0x16 => Some(Sha3_256::digest(data)),
_ => None,
}
}

#[derive(Clone)]
pub enum Hasher {
SHA256(Sha256),
Expand Down Expand Up @@ -462,76 +444,23 @@ where
/// Return a Sha256 hash of array of bytes
#[allow(dead_code)]
pub fn hash_sha256(data: &[u8]) -> Vec<u8> {
let mh = Sha2_256::digest(data);
let digest = mh.digest();

digest.to_vec()
let mut hasher = Hasher::SHA256(Sha256::new());
hasher.update(data);
Hasher::finalize(hasher)
}

pub fn hash_sha1(data: &[u8]) -> Vec<u8> {
let mh = Sha1::digest(data);
let digest = mh.digest();
digest.to_vec()
}

/// Verify multihash against input data. True if match,
/// false if no match or unsupported. The hash value should
/// be a multibase encoded string.
pub fn verify_hash(hash: &str, data: &[u8]) -> bool {
match decode(hash) {
Ok((_code, mh)) => {
if mh.len() < 2 {
return false;
}

// multihash lead bytes
let hash_type = mh[0]; // hash type
let _hash_len = mh[1]; // hash data length

// hash with the same algorithm as target
if let Some(data_hash) = hash_by_type(hash_type, data) {
vec_compare(data_hash.digest(), &mh.as_slice()[2..])
} else {
false
}
}
Err(_) => false,
}
}
use sha1::{Digest, Sha1};

/// Return the hash of data in the same hash format as in_hash
pub fn hash_as_source(in_hash: &str, data: &[u8]) -> Option<String> {
match decode(in_hash) {
Ok((code, mh)) => {
if mh.len() < 2 {
return None;
}
// create a Sha1 object
let mut hasher = Sha1::new();

// multihash lead bytes
let hash_type = mh[0]; // hash type

// hash with the same algorithm as target
match hash_by_type(hash_type, data) {
Some(hash) => {
let digest = hash.digest();

let wrapped = match hash_type {
0x12 => wrap(Code::Sha2_256, digest),
0x13 => wrap(Code::Sha2_512, digest),
0x14 => wrap(Code::Sha3_512, digest),
0x15 => wrap(Code::Sha3_384, digest),
0x16 => wrap(Code::Sha3_256, digest),
_ => return None,
};
// process input message
hasher.update(data);

// Return encoded hash.
Some(encode(code, wrapped.as_bytes()))
}
None => None,
}
}
Err(_) => None,
}
// acquire hash digest in the form of GenericArray,
// which in this case is equivalent to [u8; 20]
hasher.finalize().to_vec()
}

// Used by Merkle tree calculations to generate the pair wise hash
Expand Down
14 changes: 9 additions & 5 deletions sdk/src/wasm/webcrypto_validator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
use std::convert::TryFrom;

use js_sys::{Array, ArrayBuffer, Object, Reflect, Uint8Array};
use spki::SubjectPublicKeyInfo;
use spki::SubjectPublicKeyInfoRef;
use wasm_bindgen::prelude::*;
use wasm_bindgen_futures::JsFuture;
use web_sys::{CryptoKey, SubtleCrypto};
Expand Down Expand Up @@ -165,10 +165,12 @@ pub(crate) async fn async_validate(
use rsa::{pkcs1v15::Signature, signature::Verifier};

// used for certificate validation
let spki = SubjectPublicKeyInfo::try_from(pkey.as_ref())
let spki = SubjectPublicKeyInfoRef::try_from(pkey.as_ref())
.map_err(|err| Error::WasmRsaKeyImport(err.to_string()))?;
let (_, seq) = parse_ber_sequence(spki.subject_public_key)

let (_, seq) = parse_ber_sequence(&spki.subject_public_key.raw_bytes())
.map_err(|err| Error::WasmRsaKeyImport(err.to_string()))?;

let modulus = biguint_val(&seq[0]);
let exp = biguint_val(&seq[1]);
let public_key = RsaPublicKey::new(modulus, exp)
Expand Down Expand Up @@ -217,10 +219,12 @@ pub(crate) async fn async_validate(
"RSA-PSS" => {
use rsa::{pss::Signature, signature::Verifier};

let spki = SubjectPublicKeyInfo::try_from(pkey.as_ref())
let spki = SubjectPublicKeyInfoRef::try_from(pkey.as_ref())
.map_err(|err| Error::WasmRsaKeyImport(err.to_string()))?;
let (_, seq) = parse_ber_sequence(spki.subject_public_key)

let (_, seq) = parse_ber_sequence(&spki.subject_public_key.raw_bytes())
.map_err(|err| Error::WasmRsaKeyImport(err.to_string()))?;

// We need to normalize this from SHA-256 (the format WebCrypto uses) to sha256
// (the format the util function expects) so that it maps correctly
let normalized_hash = hash.clone().replace("-", "").to_lowercase();
Expand Down
Loading

0 comments on commit 23c0db5

Please sign in to comment.