From 83f7cb67fb8c1d79aa0ed0217544ab35218dbcaf Mon Sep 17 00:00:00 2001 From: Gavin Mendel-Gleason Date: Fri, 20 Jan 2023 09:29:10 +0100 Subject: [PATCH 1/7] Getting a start on removing clippy warnings --- src/layer/internal/object_iterator.rs | 8 ++++---- src/storage/consts.rs | 13 ++++++------- src/storage/locking.rs | 8 +++----- src/structure/smallbitarray.rs | 14 +++++++++----- 4 files changed, 22 insertions(+), 21 deletions(-) diff --git a/src/layer/internal/object_iterator.rs b/src/layer/internal/object_iterator.rs index aa2faf9e..e130cfa5 100644 --- a/src/layer/internal/object_iterator.rs +++ b/src/layer/internal/object_iterator.rs @@ -22,10 +22,10 @@ impl InternalLayerTripleObjectIterator { s_p_adjacency_list: AdjacencyList, ) -> Self { Self { - subjects: subjects, - objects: objects, - o_ps_adjacency_list: o_ps_adjacency_list, - s_p_adjacency_list: s_p_adjacency_list, + subjects, + objects, + o_ps_adjacency_list, + s_p_adjacency_list, o_position: 0, o_ps_position: 0, peeked: None, diff --git a/src/storage/consts.rs b/src/storage/consts.rs index bae2d668..b6a66766 100644 --- a/src/storage/consts.rs +++ b/src/storage/consts.rs @@ -505,7 +505,7 @@ lazy_static! 
{ ]); } -pub const SHARED_REQUIRED_FILES: [&'static str; 8] = [ +pub const SHARED_REQUIRED_FILES: [&str; 8] = [ FILENAMES.node_dictionary_blocks, FILENAMES.node_dictionary_offsets, FILENAMES.predicate_dictionary_blocks, @@ -516,7 +516,7 @@ pub const SHARED_REQUIRED_FILES: [&'static str; 8] = [ FILENAMES.value_dictionary_offsets, ]; -pub const SHARED_OPTIONAL_FILES: [&'static str; 7] = [ +pub const SHARED_OPTIONAL_FILES: [&str; 7] = [ FILENAMES.node_value_idmap_bits, FILENAMES.node_value_idmap_bit_index_blocks, FILENAMES.node_value_idmap_bit_index_sblocks, @@ -526,7 +526,7 @@ pub const SHARED_OPTIONAL_FILES: [&'static str; 7] = [ FILENAMES.rollup, ]; -pub const BASE_LAYER_REQUIRED_FILES: [&'static str; 15] = [ +pub const BASE_LAYER_REQUIRED_FILES: [&str; 15] = [ FILENAMES.base_s_p_adjacency_list_nums, FILENAMES.base_s_p_adjacency_list_bits, FILENAMES.base_s_p_adjacency_list_bit_index_blocks, @@ -544,10 +544,9 @@ pub const BASE_LAYER_REQUIRED_FILES: [&'static str; 15] = [ FILENAMES.base_predicate_wavelet_tree_bit_index_sblocks, ]; -pub const BASE_LAYER_OPTIONAL_FILES: [&'static str; 2] = - [FILENAMES.base_subjects, FILENAMES.base_objects]; +pub const BASE_LAYER_OPTIONAL_FILES: [&str; 2] = [FILENAMES.base_subjects, FILENAMES.base_objects]; -pub const CHILD_LAYER_REQUIRED_FILES: [&'static str; 31] = [ +pub const CHILD_LAYER_REQUIRED_FILES: [&str; 31] = [ FILENAMES.parent, FILENAMES.pos_s_p_adjacency_list_nums, FILENAMES.pos_s_p_adjacency_list_bits, @@ -581,7 +580,7 @@ pub const CHILD_LAYER_REQUIRED_FILES: [&'static str; 31] = [ FILENAMES.neg_predicate_wavelet_tree_bit_index_sblocks, ]; -pub const CHILD_LAYER_OPTIONAL_FILES: [&'static str; 4] = [ +pub const CHILD_LAYER_OPTIONAL_FILES: [&str; 4] = [ FILENAMES.pos_subjects, FILENAMES.pos_objects, FILENAMES.neg_subjects, diff --git a/src/storage/locking.rs b/src/storage/locking.rs index 32413a92..5374cf5c 100644 --- a/src/storage/locking.rs +++ b/src/storage/locking.rs @@ -52,11 +52,9 @@ impl Future for 
LockedFileLockFuture { if exclusive { file.lock_exclusive() .expect("failed to acquire exclusive lock") - } else { - if !cfg!(feature = "noreadlock") { - file.lock_shared() - .expect("failed to acquire exclusive lock") - } + } else if !cfg!(feature = "noreadlock") { + file.lock_shared() + .expect("failed to acquire exclusive lock") } })); } diff --git a/src/structure/smallbitarray.rs b/src/structure/smallbitarray.rs index b7108c43..23cd9646 100644 --- a/src/structure/smallbitarray.rs +++ b/src/structure/smallbitarray.rs @@ -62,7 +62,7 @@ impl Iterator for SmallBitArrayIter { return None; } - let result = (self.val & 0x80000000_00000000) != 0; + let result = (self.val & 0x8000_0000_0000_0000) != 0; self.val <<= 1; self.ix += 1; @@ -79,13 +79,15 @@ mod tests { expected = "lsb set for a small bit array. this is reserved for future expansion" )] fn panic_with_set_lsb() { - let val: u64 = 0b01101011_10111001_10010010_00000111_10010001_01100101_00000000_11111111; + let val: u64 = + 0b0110_1011_1011_1001_1001_0010_0000_0111_1001_0001_0110_0101_0000_0000_1111_1111; let _x = SmallBitArray::new(val); } #[test] fn get_from_small_bit_array() { - let val: u64 = 0b01101011_10111001_10010010_00000111_10010001_01100101_00000000_11111110; + let val: u64 = + 0b0110_1011_1011_1001_1001_0010_0000_0111_1001_0001_0110_0101_0000_0000_1111_1110; let arr = SmallBitArray::new(val); @@ -108,7 +110,8 @@ mod tests { #[test] fn iterate_small_bit_array() { - let val: u64 = 0b01101011_10111001_10010010_00000111_10010001_01100101_00000000_11111110; + let val: u64 = + 0b0110_1011_1011_1001_1001_0010_0000_0111_1001_0001_0110_0101_0000_0000_1111_1110; let arr = SmallBitArray::new(val); @@ -133,7 +136,8 @@ mod tests { #[test] fn small_bit_array_rank() { - let val: u64 = 0b01101011_10111001_10010010_00000111_10010001_01100101_00000000_11111110; + let val: u64 = + 0b0110_1011_1011_1001_1001_0010_0000_0111_1001_0001_0110_0101_0000_0000_1111_1110; let arr = SmallBitArray::new(val); From 
f54f962b8d5bf706afcaa5a9918d3e2c6b93744b Mon Sep 17 00:00:00 2001 From: Gavin Mendel-Gleason Date: Fri, 20 Jan 2023 16:12:14 +0100 Subject: [PATCH 2/7] Early attempts at clippy fixes --- src/structure/tfc/typed.rs | 34 ++++++++++++++-------------------- src/structure/util.rs | 2 +- src/structure/wavelettree.rs | 6 +++--- 3 files changed, 18 insertions(+), 24 deletions(-) diff --git a/src/structure/tfc/typed.rs b/src/structure/tfc/typed.rs index 0a8d7455..5242f1aa 100644 --- a/src/structure/tfc/typed.rs +++ b/src/structure/tfc/typed.rs @@ -141,7 +141,7 @@ impl TypedDict { } else { type_offset = self.type_offsets.entry(i - 1) as usize; id_offset = self.type_id_offsets[i - 1]; - block_offset = self.block_offsets.entry(type_offset as usize) as usize; + block_offset = self.block_offsets.entry(type_offset) as usize; } let len; @@ -173,16 +173,12 @@ impl TypedDict { ( SizedDict::from_parts(logarray_slice, data_slice, block_offset as u64), - id_offset as u64, + id_offset, ) } pub fn type_segment(&self, dt: Datatype) -> Option<(SizedDict, u64)> { - if let Some(i) = self.types_present.index_of(dt as u64) { - Some(self.inner_type_segment(i)) - } else { - None - } + self.types_present.index_of(dt as u64).map(|i| self.inner_type_segment(i)) } // TOOD: would be nice if this worked on a buf instead of a slice @@ -230,9 +226,9 @@ impl TypedDict { self.num_entries } - pub fn segment_iter<'a>(&'a self) -> DictSegmentIterator<'a> { + pub fn segment_iter(&self) -> DictSegmentIterator<'_> { DictSegmentIterator { - dict: Cow::Borrowed(&self), + dict: Cow::Borrowed(self), type_index: 0, } } @@ -244,9 +240,7 @@ impl TypedDict { } } - pub fn block_iter<'a>( - &'a self, - ) -> impl Iterator + 'a + Clone { + pub fn block_iter(&self) -> impl Iterator + '_ + Clone { self.segment_iter().flat_map(|(datatype, segment)| { segment .into_block_iter() @@ -262,7 +256,7 @@ impl TypedDict { }) } - pub fn iter<'a>(&'a self) -> impl Iterator + 'a + Clone { + pub fn iter(&self) -> impl Iterator + '_ + 
Clone { self.block_iter().flat_map(|(datatype, segment)| { segment .into_iter() @@ -332,7 +326,7 @@ impl TypedDictSegment { self.dict.num_entries() } - pub fn iter<'a>(&'a self) -> impl Iterator + 'a + Clone { + pub fn iter(&self) -> impl Iterator + '_ + Clone { self.dict.iter() } @@ -365,7 +359,7 @@ impl StringDict { self.0.num_entries() } - pub fn iter<'a>(&'a self) -> impl Iterator + 'a + Clone { + pub fn iter(&self) -> impl Iterator + '_ + Clone { self.0.iter() } @@ -443,9 +437,9 @@ impl TypedDictBufBuilder( w: &mut W, bytes: &[u8], ) -> Result { - w.write_all(&bytes).await?; + w.write_all(bytes).await?; w.write_all(&[0]).await?; let count = bytes.len() + 1; diff --git a/src/structure/wavelettree.rs b/src/structure/wavelettree.rs index 0cef8957..7a3c9430 100644 --- a/src/structure/wavelettree.rs +++ b/src/structure/wavelettree.rs @@ -118,7 +118,7 @@ impl WaveletTree { let len = self.len() as u64; let mut offset = index as u64; let mut alphabet_start = 0; - let mut alphabet_end = 2_u64.pow(self.num_layers as u32) as u64; + let mut alphabet_end = 2_u64.pow(self.num_layers as u32); let mut range_start = 0; let mut range_end = len; for i in 0..self.num_layers as u64 { @@ -164,7 +164,7 @@ impl WaveletTree { let width = self.len() as u64; let mut slices = Vec::with_capacity(self.num_layers as usize); let mut alphabet_start = 0; - let mut alphabet_end = 2_u64.pow(self.num_layers as u32) as u64; + let mut alphabet_end = 2_u64.pow(self.num_layers as u32); if entry >= alphabet_end { return None; @@ -259,7 +259,7 @@ fn create_fragments(width: u8) -> Vec { result } -fn push_to_fragments(num: u64, width: u8, fragments: &mut Vec) { +fn push_to_fragments(num: u64, width: u8, fragments: &mut [FragmentBuilder]) { let mut num_it: usize = num.try_into().unwrap(); // this will ensure that we get some sort of error on 32 bit for large nums for i in 0..width { num_it >>= 1; From 7436938ad709112b1c426b89e1d75ce2207ae1f0 Mon Sep 17 00:00:00 2001 From: Gavin Mendel-Gleason Date: Fri, 
20 Jan 2023 23:34:53 +0100 Subject: [PATCH 3/7] Fixes for a few problems --- src/structure/tfc/block.rs | 15 ++++++++------ src/structure/tfc/dict.rs | 41 +++++++++++--------------------------- 2 files changed, 21 insertions(+), 35 deletions(-) diff --git a/src/structure/tfc/block.rs b/src/structure/tfc/block.rs index 95e061f7..27f6126b 100644 --- a/src/structure/tfc/block.rs +++ b/src/structure/tfc/block.rs @@ -49,11 +49,10 @@ impl SizedBlockHeader { for i in 0..(num_entries - 1) as usize { let (shared, _) = vbyte::decode_buf(buf)?; - let size = if record_size == None { - let (size, _) = vbyte::decode_buf(buf)?; - size + let size = if let Some(record_size) = record_size { + record_size as u64 - shared } else { - record_size.unwrap() as u64 - shared + vbyte::decode_buf(buf)?.0 }; sizes[i] = size as usize; shareds[i] = shared as usize; @@ -163,6 +162,10 @@ impl SizedDictEntry { self.chunks().map(|s| s.len()).sum() } + pub fn is_empty(&self) -> bool { + self.chunks().next().is_none() + } + fn rope_len(&self) -> usize { match self { Self::Single(_) => 1, @@ -191,7 +194,7 @@ impl SizedDictEntry { pub fn buf_eq(&self, mut b: B) -> bool { if self.len() != b.remaining() { false - } else if self.len() == 0 { + } else if self.is_empty() { true } else { let mut it = self.chunks(); @@ -249,7 +252,7 @@ impl Hash for SizedDictEntry { impl Ord for SizedDictEntry { fn cmp(&self, other: &Self) -> Ordering { // both are empty, so equal - if self.len() == 0 && other.len() == 0 { + if self.is_empty() && other.is_empty() { return Ordering::Equal; } diff --git a/src/structure/tfc/dict.rs b/src/structure/tfc/dict.rs index ce4deee1..ed035265 100644 --- a/src/structure/tfc/dict.rs +++ b/src/structure/tfc/dict.rs @@ -66,7 +66,7 @@ impl SizedDictBufBuilder { } pub fn finalize(mut self) -> (LateLogArrayBufBuilder, B2, u64, u64) { - if self.current_block.len() > 0 { + if !self.current_block.is_empty() { let current_block: Vec<&[u8]> = self.current_block.iter().map(|e| e.as_ref()).collect(); 
let size = build_block_unchecked(self.record_size, &mut self.data_buf, &current_block); self.block_offset += size as u64; @@ -116,12 +116,11 @@ impl SizedDict { } fn block_offset(&self, block_index: usize) -> usize { - let offset: usize; - if block_index == 0 { - offset = 0; + let offset: usize = if block_index == 0 { + 0 } else { - offset = (self.offsets.entry(block_index - 1) - self.dict_offset) as usize; - } + (self.offsets.entry(block_index - 1) - self.dict_offset) as usize + }; offset } @@ -131,10 +130,7 @@ impl SizedDict { panic!("empty dictionary has no block"); } let offset = self.block_offset(block_index); - let block_bytes; - block_bytes = self.data.slice(offset..); - - block_bytes + self.data.slice(offset..) } pub fn block(&self, block_index: usize) -> SizedDictBlock { @@ -168,8 +164,8 @@ impl SizedDict { if index > self.num_entries() { return None; } - let block = self.block(((index - 1) / 8) as usize); - Some(block.entry(((index - 1) % 8) as usize)) + let block = self.block((index - 1) / 8); + Some(block.entry((index - 1) % 8)) } pub fn id(&self, slice: &[u8]) -> IdLookupResult { @@ -204,23 +200,10 @@ impl SizedDict { let block = self.block(found); let block_id = block.id(slice); let offset = (found * BLOCK_SIZE) as u64 + 1; - let result = block_id.offset(offset).default(offset - 1); - /* - if found != 0 { - // the default value will fill in the last index of the - // previous block if the entry was not found in the - // current block. This is only possible if the block as - // not the very first one. 
- result.default(self.block_num_elements(found - 1) as u64 + offset - 1) - } else { - result - } - */ - - result + block_id.offset(offset).default(offset - 1) } - pub fn block_iter<'a>(&'a self) -> SizedDictBlockIterator<'a> { + pub fn block_iter(&self) -> SizedDictBlockIterator { SizedDictBlockIterator { dict: Cow::Borrowed(self), index: 0, @@ -234,7 +217,7 @@ impl SizedDict { } } - pub fn iter<'a>(&'a self) -> impl Iterator + 'a + Clone { + pub fn iter(&self) -> impl Iterator + '_ + Clone { self.block_iter().flat_map(|b| b.into_iter()) } @@ -356,7 +339,7 @@ mod tests { build_dict_and_offsets( &mut array_buf, &mut data_buf, - strings.clone().into_iter().map(|s| Bytes::from(s)), + strings.clone().into_iter().map(Bytes::from), ); let array_bytes = array_buf.freeze(); From 6e8fcb9ce6aa10544842be701937dead3b9a9714 Mon Sep 17 00:00:00 2001 From: Gavin Mendel-Gleason Date: Sat, 21 Jan 2023 11:43:01 +0100 Subject: [PATCH 4/7] More passing tests --- src/layer/builder.rs | 11 +++------ src/layer/internal/base.rs | 24 +++++-------------- src/layer/internal/mod.rs | 7 ++---- src/layer/internal/subject_iterator.rs | 15 ++++++------ src/layer/layer.rs | 19 +++++++-------- src/layer/simple_builder.rs | 4 ++-- src/storage/archive.rs | 8 ++----- src/storage/delta.rs | 5 ++-- src/storage/directory.rs | 6 ++--- src/storage/file.rs | 18 +++++++-------- src/storage/layer.rs | 24 ++++++++----------- src/storage/memory.rs | 6 +++++ src/storage/pack.rs | 6 ++--- src/store/mod.rs | 10 ++++---- src/structure/adjacencylist.rs | 2 +- src/structure/bitindex.rs | 9 +++++--- src/structure/logarray.rs | 22 ++++++++---------- src/structure/tfc/block.rs | 32 +++++++++++++++----------- src/structure/tfc/decimal.rs | 4 ++-- 19 files changed, 104 insertions(+), 128 deletions(-) diff --git a/src/layer/builder.rs b/src/layer/builder.rs index d8a18d73..40381bac 100644 --- a/src/layer/builder.rs +++ b/src/layer/builder.rs @@ -59,20 +59,15 @@ impl DictionarySetFileBuilder { /// /// Panics if the given 
predicate string is not a lexical successor of the previous node string. pub fn add_predicate(&mut self, predicate: &str) -> u64 { - let id = self - .predicate_dictionary_builder - .add(Bytes::copy_from_slice(predicate.as_bytes())); - - id + self.predicate_dictionary_builder + .add(Bytes::copy_from_slice(predicate.as_bytes())) } /// Add a value string. /// /// Panics if the given value string is not a lexical successor of the previous value string. pub fn add_value(&mut self, value: TypedDictEntry) -> u64 { - let id = self.value_dictionary_builder.add(value); - - id + self.value_dictionary_builder.add(value) } /// Add nodes from an iterable. diff --git a/src/layer/internal/base.rs b/src/layer/internal/base.rs index eb7654cd..10cc11dc 100644 --- a/src/layer/internal/base.rs +++ b/src/layer/internal/base.rs @@ -172,27 +172,21 @@ impl BaseLayerFileBuilder { /// /// Panics if the given node string is not a lexical successor of the previous node string. pub fn add_node(&mut self, node: &str) -> u64 { - let id = self.builder.add_node(node); - - id + self.builder.add_node(node) } /// Add a predicate string. /// /// Panics if the given predicate string is not a lexical successor of the previous node string. pub fn add_predicate(&mut self, predicate: &str) -> u64 { - let id = self.builder.add_predicate(predicate); - - id + self.builder.add_predicate(predicate) } /// Add a value string. /// /// Panics if the given value string is not a lexical successor of the previous value string. pub fn add_value(&mut self, value: TypedDictEntry) -> u64 { - let id = self.builder.add_value(value); - - id + self.builder.add_value(value) } /// Add nodes from an iterable. @@ -206,9 +200,7 @@ impl BaseLayerFileBuilder { ::IntoIter: Unpin + Send + Sync, I: Unpin + Sync, { - let ids = self.builder.add_nodes(nodes); - - ids + self.builder.add_nodes(nodes) } /// Add predicates from an iterable. 
@@ -222,9 +214,7 @@ impl BaseLayerFileBuilder { ::IntoIter: Unpin + Send + Sync, I: Unpin + Sync, { - let ids = self.builder.add_predicates(predicates); - - ids + self.builder.add_predicates(predicates) } /// Add values from an iterable. @@ -238,9 +228,7 @@ impl BaseLayerFileBuilder { ::IntoIter: Unpin + Send + Sync, I: Unpin + Sync, { - let ids = self.builder.add_values(values); - - ids + self.builder.add_values(values) } /// Turn this builder into a phase 2 builder that will take triple data. diff --git a/src/layer/internal/mod.rs b/src/layer/internal/mod.rs index cd8d3455..690c012b 100644 --- a/src/layer/internal/mod.rs +++ b/src/layer/internal/mod.rs @@ -524,10 +524,7 @@ impl InternalLayer { } pub fn is_rollup(&self) -> bool { - match self { - Rollup(_) => true, - _ => false, - } + matches!(self, Rollup(_)) } } @@ -635,7 +632,7 @@ impl Layer for InternalLayer { parent_count = parent_count - current_layer.node_dict_len() as u64 - current_layer.value_dict_len() as u64; - if corrected_id > parent_count as u64 { + if corrected_id > parent_count { // subject, if it exists, is in this layer corrected_id -= parent_count; } else { diff --git a/src/layer/internal/subject_iterator.rs b/src/layer/internal/subject_iterator.rs index 5c356b50..a20eedef 100644 --- a/src/layer/internal/subject_iterator.rs +++ b/src/layer/internal/subject_iterator.rs @@ -22,9 +22,9 @@ impl InternalLayerTripleSubjectIterator { sp_o_adjacency_list: AdjacencyList, ) -> Self { Self { - subjects: subjects, - s_p_adjacency_list: s_p_adjacency_list, - sp_o_adjacency_list: sp_o_adjacency_list, + subjects, + s_p_adjacency_list, + sp_o_adjacency_list, s_position: 0, s_p_position: 0, sp_o_position: 0, @@ -422,7 +422,7 @@ impl Iterator for InternalTripleStackIterator { (Some(lowest_pos_index), Some(lowest_neg_index)) => { let lowest_pos = self.positives[lowest_pos_index].peek().unwrap(); let lowest_neg = self.negatives[lowest_neg_index].peek().unwrap(); - match lowest_pos.cmp(&lowest_neg) { + match 
lowest_pos.cmp(lowest_neg) { Ordering::Less => { // next change is an addition, and there's no matching removal return Some(( @@ -479,7 +479,7 @@ mod tests { #[tokio::test] async fn base_triple_removal_iterator() { - let base_layer: InternalLayer = example_base_layer().await.into(); + let base_layer: InternalLayer = example_base_layer().await; let triples: Vec<_> = base_layer.internal_triple_removals().collect(); assert!(triples.is_empty()); @@ -811,7 +811,7 @@ mod tests { async fn child_layer() -> InternalLayer { let base_layer = example_base_layer().await; - let parent: Arc = Arc::new(base_layer.into()); + let parent: Arc = Arc::new(base_layer); let child_files = child_layer_files(); @@ -830,7 +830,6 @@ mod tests { ChildLayer::load_from_files([5, 4, 3, 2, 1], parent, &child_files) .await .unwrap() - .into() } #[tokio::test] @@ -1032,7 +1031,7 @@ mod tests { async fn iterate_partial_stack() { let (parent_id, layer) = create_stack_for_partial_tests().await; - let iterator = InternalTripleStackIterator::from_layer_stack(&*layer, parent_id).unwrap(); + let iterator = InternalTripleStackIterator::from_layer_stack(&layer, parent_id).unwrap(); let changes: Vec<_> = iterator .map(|t| (t.0, layer.id_triple_to_string(&t.1).unwrap())) .collect(); diff --git a/src/layer/layer.rs b/src/layer/layer.rs index 51652384..7b4ed487 100644 --- a/src/layer/layer.rs +++ b/src/layer/layer.rs @@ -98,8 +98,8 @@ pub trait Layer: Send + Sync { self.subject_id(&triple.subject).and_then(|subject| { self.predicate_id(&triple.predicate).and_then(|predicate| { match &triple.object { - ObjectType::Node(node) => self.object_node_id(&node), - ObjectType::Value(value) => self.object_value_id(&value), + ObjectType::Node(node) => self.object_node_id(node), + ObjectType::Value(value) => self.object_value_id(value), } .map(|object| IdTriple { subject, @@ -127,11 +127,11 @@ pub trait Layer: Send + Sync { .unwrap_or(PossiblyResolved::Unresolved(triple.predicate)), object: match &triple.object { 
ObjectType::Node(node) => self - .object_node_id(&node) + .object_node_id(node) .map(PossiblyResolved::Resolved) .unwrap_or(PossiblyResolved::Unresolved(triple.object)), ObjectType::Value(value) => self - .object_value_id(&value) + .object_value_id(value) .map(PossiblyResolved::Resolved) .unwrap_or(PossiblyResolved::Unresolved(triple.object)), }, @@ -270,7 +270,7 @@ impl PossiblyResolved { /// Return a PossiblyResolved with the inner value as a reference. pub fn as_ref(&self) -> PossiblyResolved<&T> { match self { - Self::Unresolved(u) => PossiblyResolved::Unresolved(&u), + Self::Unresolved(u) => PossiblyResolved::Unresolved(u), Self::Resolved(id) => PossiblyResolved::Resolved(*id), } } @@ -426,8 +426,7 @@ mod tests { let base: Arc = Arc::new( BaseLayer::load_from_files([1, 2, 3, 4, 5], &files) .await - .unwrap() - .into(), + .unwrap(), ); let files = child_layer_files(); @@ -439,8 +438,7 @@ mod tests { let child: Arc = Arc::new( ChildLayer::load_from_files([5, 4, 3, 2, 1], base.clone(), &files) .await - .unwrap() - .into(), + .unwrap(), ); // TODO why are we not using these results? 
@@ -633,8 +631,7 @@ mod tests { let base: Arc = Arc::new( BaseLayer::load_from_files([1, 2, 3, 4, 5], &files) .await - .unwrap() - .into(), + .unwrap(), ); let mut results: Vec<_> = base diff --git a/src/layer/simple_builder.rs b/src/layer/simple_builder.rs index 90c4a23e..81c25bb7 100644 --- a/src/layer/simple_builder.rs +++ b/src/layer/simple_builder.rs @@ -287,7 +287,7 @@ fn zero_equivalents( let mut removals_iter = removals.iter_mut().peekable(); 'outer: for mut addition in additions { let mut next = removals_iter.peek(); - if next == None { + if next.is_none() { break; } @@ -296,7 +296,7 @@ fn zero_equivalents( removals_iter.next().unwrap(); next = removals_iter.peek(); - if next == None { + if next.is_none() { break 'outer; } else if next >= Some(&addition) { break; diff --git a/src/storage/archive.rs b/src/storage/archive.rs index 7eb2324f..255d94c4 100644 --- a/src/storage/archive.rs +++ b/src/storage/archive.rs @@ -67,11 +67,7 @@ impl ConstructionFile { fn is_finalized(&self) -> bool { let guard = self.0.read().unwrap(); - if let ConstructionFileState::Finalized(_) = &*guard { - true - } else { - false - } + matches!(&*guard, ConstructionFileState::Finalized(_)) } fn finalized_buf(self) -> Bytes { @@ -403,7 +399,7 @@ impl AsyncRead for ArchiveSliceReader { } let read = AsyncRead::poll_read(Pin::new(&mut self.file), cx, buf); - if let Poll::Pending = read { + if read.is_pending() { return Poll::Pending; } diff --git a/src/storage/delta.rs b/src/storage/delta.rs index fb291899..01b65096 100644 --- a/src/storage/delta.rs +++ b/src/storage/delta.rs @@ -19,7 +19,7 @@ async fn safe_upto_bound( .retrieve_layer_stack_names_upto(layer.name(), upto) .await?; - let mut l = &*layer; + let mut l = layer; loop { let parent = match l.immediate_parent() { None => { @@ -487,8 +487,7 @@ mod tests { let delta_layer: Arc = Arc::new( BaseLayer::load_from_files([0, 0, 0, 0, 4], &delta_files) .await - .unwrap() - .into(), + .unwrap(), ); let expected: Vec<_> = layer diff --git 
a/src/storage/directory.rs b/src/storage/directory.rs index a9f2ad52..191971ea 100644 --- a/src/storage/directory.rs +++ b/src/storage/directory.rs @@ -190,7 +190,7 @@ impl DirectoryLabelStore { } fn get_label_from_data(name: String, data: &[u8]) -> io::Result