Clippy fixes #120

Open · wants to merge 8 commits into base: main
11 changes: 3 additions & 8 deletions src/layer/builder.rs
@@ -65,11 +65,8 @@ impl<F: 'static + FileLoad + FileStore> DictionarySetFileBuilder<F> {
///
/// Panics if the given predicate string is not a lexical successor of the previous node string.
pub fn add_predicate(&mut self, predicate: &str) -> u64 {
let id = self
.predicate_dictionary_builder
.add(Bytes::copy_from_slice(predicate.as_bytes()));

id
self.predicate_dictionary_builder
.add(Bytes::copy_from_slice(predicate.as_bytes()))
}

pub fn add_predicate_bytes(&mut self, predicate: Bytes) -> u64 {
@@ -82,9 +79,7 @@ impl<F: 'static + FileLoad + FileStore> DictionarySetFileBuilder<F> {
///
/// Panics if the given value string is not a lexical successor of the previous value string.
pub fn add_value(&mut self, value: TypedDictEntry) -> u64 {
let id = self.value_dictionary_builder.add(value);

id
self.value_dictionary_builder.add(value)
}

/// Add nodes from an iterable.
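Most of the hunks in this PR drop a temporary binding that is returned on the very next line, which Clippy flags as `let_and_return`. A minimal standalone sketch of the pattern (the `add` helper and its types are invented for illustration, not part of this crate):

```rust
// Hypothetical helper standing in for the dictionary builders above.
fn add(dict: &mut Vec<String>, entry: &str) -> u64 {
    dict.push(entry.to_string());
    dict.len() as u64
}

// Before: bind the result, then immediately return the binding
// (triggers `clippy::let_and_return`).
fn add_entry_verbose(dict: &mut Vec<String>, entry: &str) -> u64 {
    let id = add(dict, entry);
    id
}

// After: return the expression directly, as the diff does.
fn add_entry(dict: &mut Vec<String>, entry: &str) -> u64 {
    add(dict, entry)
}

fn main() {
    let mut dict = Vec::new();
    assert_eq!(add_entry_verbose(&mut dict, "a"), 1);
    assert_eq!(add_entry(&mut dict, "b"), 2);
}
```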
24 changes: 6 additions & 18 deletions src/layer/internal/base.rs
@@ -173,9 +173,7 @@ impl<F: 'static + FileLoad + FileStore + Clone> BaseLayerFileBuilder<F> {
///
/// Panics if the given node string is not a lexical successor of the previous node string.
pub fn add_node(&mut self, node: &str) -> u64 {
let id = self.builder.add_node(node);

id
self.builder.add_node(node)
}

pub fn add_node_bytes(&mut self, node: Bytes) -> u64 {
@@ -186,9 +184,7 @@ impl<F: 'static + FileLoad + FileStore + Clone> BaseLayerFileBuilder<F> {
///
/// Panics if the given predicate string is not a lexical successor of the previous node string.
pub fn add_predicate(&mut self, predicate: &str) -> u64 {
let id = self.builder.add_predicate(predicate);

id
self.builder.add_predicate(predicate)
}

pub fn add_predicate_bytes(&mut self, predicate: Bytes) -> u64 {
@@ -199,9 +195,7 @@ impl<F: 'static + FileLoad + FileStore + Clone> BaseLayerFileBuilder<F> {
///
/// Panics if the given value string is not a lexical successor of the previous value string.
pub fn add_value(&mut self, value: TypedDictEntry) -> u64 {
let id = self.builder.add_value(value);

id
self.builder.add_value(value)
}

/// Add nodes from an iterable.
@@ -215,9 +209,7 @@ impl<F: 'static + FileLoad + FileStore + Clone> BaseLayerFileBuilder<F> {
<I as std::iter::IntoIterator>::IntoIter: Unpin + Send + Sync,
I: Unpin + Sync,
{
let ids = self.builder.add_nodes(nodes);

ids
self.builder.add_nodes(nodes)
}

pub fn add_nodes_bytes<I: 'static + IntoIterator<Item = Bytes> + Send>(
@@ -244,9 +236,7 @@ impl<F: 'static + FileLoad + FileStore + Clone> BaseLayerFileBuilder<F> {
<I as std::iter::IntoIterator>::IntoIter: Unpin + Send + Sync,
I: Unpin + Sync,
{
let ids = self.builder.add_predicates(predicates);

ids
self.builder.add_predicates(predicates)
}

pub fn add_predicates_bytes<I: 'static + IntoIterator<Item = Bytes> + Send>(
@@ -273,9 +263,7 @@ impl<F: 'static + FileLoad + FileStore + Clone> BaseLayerFileBuilder<F> {
<I as std::iter::IntoIterator>::IntoIter: Unpin + Send + Sync,
I: Unpin + Sync,
{
let ids = self.builder.add_values(values);

ids
self.builder.add_values(values)
}

/// Turn this builder into a phase 2 builder that will take triple data.
9 changes: 3 additions & 6 deletions src/layer/internal/mod.rs
@@ -524,10 +524,7 @@ impl InternalLayer {
}

pub fn is_rollup(&self) -> bool {
match self {
Rollup(_) => true,
_ => false,
}
matches!(self, Rollup(_))
}
}

Expand Down Expand Up @@ -635,7 +632,7 @@ impl Layer for InternalLayer {
parent_count = parent_count
- current_layer.node_dict_len() as u64
- current_layer.value_dict_len() as u64;
if corrected_id > parent_count as u64 {
if corrected_id > parent_count {
// subject, if it exists, is in this layer
corrected_id -= parent_count;
} else {
Expand Down Expand Up @@ -666,7 +663,7 @@ impl Layer for InternalLayer {
let mut corrected_id = id;
if let Some(parent) = current_layer.immediate_parent() {
parent_count -= current_layer.predicate_dict_len() as u64;
if corrected_id > parent_count as u64 {
if corrected_id > parent_count {
// subject, if it exists, is in this layer
corrected_id -= parent_count;
} else {
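Two lints appear to be involved in this file: `match_like_matches_macro` (the `is_rollup` rewrite) and `unnecessary_cast` (dropping `as u64` from a value that is already `u64`). A self-contained sketch with an invented enum, not the crate's `InternalLayer`:

```rust
// Invented stand-in for the layer enum; only the shape of the check matters.
enum Layer {
    Base(u32),
    Rollup(u32),
}

impl Layer {
    // Before: a match that maps one pattern to `true` and everything else to `false`.
    fn is_rollup_verbose(&self) -> bool {
        match self {
            Layer::Rollup(_) => true,
            _ => false,
        }
    }

    // After: `matches!` expresses the same check in one expression.
    fn is_rollup(&self) -> bool {
        matches!(self, Layer::Rollup(_))
    }
}

fn main() {
    assert!(Layer::Rollup(1).is_rollup());
    assert!(!Layer::Base(1).is_rollup_verbose());

    // `unnecessary_cast`: `parent_count` is already `u64`, so `as u64` adds nothing.
    let parent_count: u64 = 10;
    let corrected_id: u64 = 12;
    assert_eq!(corrected_id > parent_count as u64, corrected_id > parent_count);
}
```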
8 changes: 4 additions & 4 deletions src/layer/internal/object_iterator.rs
@@ -22,10 +22,10 @@ impl InternalLayerTripleObjectIterator {
s_p_adjacency_list: AdjacencyList,
) -> Self {
Self {
subjects: subjects,
objects: objects,
o_ps_adjacency_list: o_ps_adjacency_list,
s_p_adjacency_list: s_p_adjacency_list,
subjects,
objects,
o_ps_adjacency_list,
s_p_adjacency_list,
o_position: 0,
o_ps_position: 0,
peeked: None,
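The change here is Clippy's `redundant_field_names`: when a local variable has the same name as the struct field, the field init shorthand applies. A small sketch with an invented struct:

```rust
// Invented struct; only the initializer style is the point.
struct TripleIterator {
    subjects: Vec<u64>,
    position: usize,
}

impl TripleIterator {
    // Before: `subjects: subjects` repeats the name (clippy::redundant_field_names).
    fn new_verbose(subjects: Vec<u64>) -> Self {
        Self {
            subjects: subjects,
            position: 0,
        }
    }

    // After: field init shorthand.
    fn new(subjects: Vec<u64>) -> Self {
        Self {
            subjects,
            position: 0,
        }
    }
}

fn main() {
    let a = TripleIterator::new_verbose(vec![1, 2, 3]);
    let b = TripleIterator::new(vec![1, 2, 3]);
    assert_eq!(a.subjects, b.subjects);
    assert_eq!(a.position, b.position);
}
```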
15 changes: 7 additions & 8 deletions src/layer/internal/subject_iterator.rs
@@ -22,9 +22,9 @@ impl InternalLayerTripleSubjectIterator {
sp_o_adjacency_list: AdjacencyList,
) -> Self {
Self {
subjects: subjects,
s_p_adjacency_list: s_p_adjacency_list,
sp_o_adjacency_list: sp_o_adjacency_list,
subjects,
s_p_adjacency_list,
sp_o_adjacency_list,
s_position: 0,
s_p_position: 0,
sp_o_position: 0,
@@ -422,7 +422,7 @@ impl Iterator for InternalTripleStackIterator {
(Some(lowest_pos_index), Some(lowest_neg_index)) => {
let lowest_pos = self.positives[lowest_pos_index].peek().unwrap();
let lowest_neg = self.negatives[lowest_neg_index].peek().unwrap();
match lowest_pos.cmp(&lowest_neg) {
match lowest_pos.cmp(lowest_neg) {
Ordering::Less => {
// next change is an addition, and there's no matching removal
return Some((
@@ -479,7 +479,7 @@ mod tests {

#[tokio::test]
async fn base_triple_removal_iterator() {
let base_layer: InternalLayer = example_base_layer().await.into();
let base_layer: InternalLayer = example_base_layer().await;

let triples: Vec<_> = base_layer.internal_triple_removals().collect();
assert!(triples.is_empty());
@@ -811,7 +811,7 @@ mod tests {

async fn child_layer() -> InternalLayer {
let base_layer = example_base_layer().await;
let parent: Arc<InternalLayer> = Arc::new(base_layer.into());
let parent: Arc<InternalLayer> = Arc::new(base_layer);

let child_files = child_layer_files();

@@ -830,7 +830,6 @@
ChildLayer::load_from_files([5, 4, 3, 2, 1], parent, &child_files)
.await
.unwrap()
.into()
}

#[tokio::test]
@@ -1032,7 +1031,7 @@
async fn iterate_partial_stack() {
let (parent_id, layer) = create_stack_for_partial_tests().await;

let iterator = InternalTripleStackIterator::from_layer_stack(&*layer, parent_id).unwrap();
let iterator = InternalTripleStackIterator::from_layer_stack(&layer, parent_id).unwrap();
let changes: Vec<_> = iterator
.map(|t| (t.0, layer.id_triple_to_string(&t.1).unwrap()))
.collect();
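Besides the field shorthand, this file's hunks drop extra borrows (`.cmp(&lowest_neg)` where the value is already a reference, `&*layer` where `&layer` suffices) and `.into()` calls that convert a value into its own type. These correspond to Clippy's `needless_borrow` and `useless_conversion`; a rough standalone sketch:

```rust
use std::cmp::Ordering;

fn main() {
    // `needless_borrow`: `a` and `b` are already references, so `a.cmp(&b)`
    // adds a pointless extra `&`; pass the reference directly.
    let (x, y): (u64, u64) = (1, 2);
    let (a, b): (&u64, &u64) = (&x, &y);
    assert_eq!(a.cmp(b), Ordering::Less);

    // `useless_conversion`: `.into()` from a type into itself does nothing.
    let name = String::from("base layer");
    let converted: String = name.clone().into(); // flagged by Clippy
    let direct: String = name;                   // after the fix
    assert_eq!(converted, direct);
}
```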
19 changes: 8 additions & 11 deletions src/layer/layer.rs
@@ -98,8 +98,8 @@ pub trait Layer: Send + Sync {
self.subject_id(&triple.subject).and_then(|subject| {
self.predicate_id(&triple.predicate).and_then(|predicate| {
match &triple.object {
ObjectType::Node(node) => self.object_node_id(&node),
ObjectType::Value(value) => self.object_value_id(&value),
ObjectType::Node(node) => self.object_node_id(node),
ObjectType::Value(value) => self.object_value_id(value),
}
.map(|object| IdTriple {
subject,
@@ -127,11 +127,11 @@ pub trait Layer: Send + Sync {
.unwrap_or(PossiblyResolved::Unresolved(triple.predicate)),
object: match &triple.object {
ObjectType::Node(node) => self
.object_node_id(&node)
.object_node_id(node)
.map(PossiblyResolved::Resolved)
.unwrap_or(PossiblyResolved::Unresolved(triple.object)),
ObjectType::Value(value) => self
.object_value_id(&value)
.object_value_id(value)
.map(PossiblyResolved::Resolved)
.unwrap_or(PossiblyResolved::Unresolved(triple.object)),
},
@@ -270,7 +270,7 @@ impl<T: Clone + PartialEq + Eq + PartialOrd + Ord + Hash> PossiblyResolved<T> {
/// Return a PossiblyResolved with the inner value as a reference.
pub fn as_ref(&self) -> PossiblyResolved<&T> {
match self {
Self::Unresolved(u) => PossiblyResolved::Unresolved(&u),
Self::Unresolved(u) => PossiblyResolved::Unresolved(u),
Self::Resolved(id) => PossiblyResolved::Resolved(*id),
}
}
@@ -426,8 +426,7 @@ mod tests {
let base: Arc<InternalLayer> = Arc::new(
BaseLayer::load_from_files([1, 2, 3, 4, 5], &files)
.await
.unwrap()
.into(),
.unwrap(),
);

let files = child_layer_files();
@@ -439,8 +438,7 @@ mod tests {
let child: Arc<InternalLayer> = Arc::new(
ChildLayer::load_from_files([5, 4, 3, 2, 1], base.clone(), &files)
.await
.unwrap()
.into(),
.unwrap(),
);

// TODO why are we not using these results?
@@ -633,8 +631,7 @@ mod tests {
let base: Arc<InternalLayer> = Arc::new(
BaseLayer::load_from_files([1, 2, 3, 4, 5], &files)
.await
.unwrap()
.into(),
.unwrap(),
);

let mut results: Vec<_> = base
4 changes: 2 additions & 2 deletions src/layer/simple_builder.rs
@@ -287,7 +287,7 @@ fn zero_equivalents(
let mut removals_iter = removals.iter_mut().peekable();
'outer: for mut addition in additions {
let mut next = removals_iter.peek();
if next == None {
if next.is_none() {
break;
}

@@ -296,7 +296,7 @@
removals_iter.next().unwrap();
next = removals_iter.peek();

if next == None {
if next.is_none() {
break 'outer;
} else if next >= Some(&addition) {
break;
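The two hunks above replace `next == None` with `next.is_none()`, which is Clippy's `partialeq_to_none` lint. A sketch with a plain peekable iterator instead of the builder's removal list:

```rust
fn main() {
    let removals = vec![3_u64, 7];
    let mut iter = removals.iter().peekable();
    iter.next();
    iter.next();

    let next = iter.peek();
    // Before: comparing against `None` needs a `PartialEq` bound on the inner type.
    assert!(next == None);
    // After: `is_none()` states the intent directly.
    assert!(next.is_none());
}
```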
8 changes: 2 additions & 6 deletions src/storage/archive.rs
@@ -608,11 +608,7 @@ impl ConstructionFile {

fn is_finalized(&self) -> bool {
let guard = self.0.read().unwrap();
if let ConstructionFileState::Finalized(_) = &*guard {
true
} else {
false
}
matches!(&*guard, ConstructionFileState::Finalized(_))
}

fn finalized_buf(self) -> Bytes {
@@ -953,7 +949,7 @@ impl AsyncRead for ArchiveSliceReader {
}

let read = AsyncRead::poll_read(Pin::new(&mut self.file), cx, buf);
if let Poll::Pending = read {
if read.is_pending() {
return Poll::Pending;
}

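The `ArchiveSliceReader` hunk swaps an `if let Poll::Pending = read` for `read.is_pending()`. A minimal sketch with a `Poll` value built directly rather than coming from `AsyncRead::poll_read`:

```rust
use std::task::Poll;

fn main() {
    let read: Poll<usize> = Poll::Pending;

    // Before: destructure just to test the variant.
    let was_pending = if let Poll::Pending = read { true } else { false };
    // After: the stdlib helper asks the same question.
    assert_eq!(was_pending, read.is_pending());
}
```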
13 changes: 6 additions & 7 deletions src/storage/consts.rs
@@ -505,7 +505,7 @@ lazy_static! {
]);
}

pub const SHARED_REQUIRED_FILES: [&'static str; 8] = [
pub const SHARED_REQUIRED_FILES: [&str; 8] = [
FILENAMES.node_dictionary_blocks,
FILENAMES.node_dictionary_offsets,
FILENAMES.predicate_dictionary_blocks,
@@ -516,7 +516,7 @@ pub const SHARED_REQUIRED_FILES: [&'static str; 8] = [
FILENAMES.value_dictionary_offsets,
];

pub const SHARED_OPTIONAL_FILES: [&'static str; 7] = [
pub const SHARED_OPTIONAL_FILES: [&str; 7] = [
FILENAMES.node_value_idmap_bits,
FILENAMES.node_value_idmap_bit_index_blocks,
FILENAMES.node_value_idmap_bit_index_sblocks,
@@ -526,7 +526,7 @@ pub const SHARED_OPTIONAL_FILES: [&'static str; 7] = [
FILENAMES.rollup,
];

pub const BASE_LAYER_REQUIRED_FILES: [&'static str; 15] = [
pub const BASE_LAYER_REQUIRED_FILES: [&str; 15] = [
FILENAMES.base_s_p_adjacency_list_nums,
FILENAMES.base_s_p_adjacency_list_bits,
FILENAMES.base_s_p_adjacency_list_bit_index_blocks,
@@ -544,10 +544,9 @@ pub const BASE_LAYER_REQUIRED_FILES: [&'static str; 15] = [
FILENAMES.base_predicate_wavelet_tree_bit_index_sblocks,
];

pub const BASE_LAYER_OPTIONAL_FILES: [&'static str; 2] =
[FILENAMES.base_subjects, FILENAMES.base_objects];
pub const BASE_LAYER_OPTIONAL_FILES: [&str; 2] = [FILENAMES.base_subjects, FILENAMES.base_objects];

pub const CHILD_LAYER_REQUIRED_FILES: [&'static str; 31] = [
pub const CHILD_LAYER_REQUIRED_FILES: [&str; 31] = [
FILENAMES.parent,
FILENAMES.pos_s_p_adjacency_list_nums,
FILENAMES.pos_s_p_adjacency_list_bits,
@@ -581,7 +580,7 @@ pub const CHILD_LAYER_REQUIRED_FILES: [&'static str; 31] = [
FILENAMES.neg_predicate_wavelet_tree_bit_index_sblocks,
];

pub const CHILD_LAYER_OPTIONAL_FILES: [&'static str; 4] = [
pub const CHILD_LAYER_OPTIONAL_FILES: [&str; 4] = [
FILENAMES.pos_subjects,
FILENAMES.pos_objects,
FILENAMES.neg_subjects,
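The consts.rs changes are all Clippy's `redundant_static_lifetimes`: in a `const` or `static` item the reference lifetime is already `'static`, so writing it out is noise. A sketch with placeholder file names rather than the real `FILENAMES` entries:

```rust
// Before: the explicit lifetime is redundant in a const item.
const REQUIRED_BEFORE: [&'static str; 2] = ["nodes.dict", "predicates.dict"];

// After: same type, same meaning, less noise.
const REQUIRED_AFTER: [&str; 2] = ["nodes.dict", "predicates.dict"];

fn main() {
    assert_eq!(REQUIRED_BEFORE, REQUIRED_AFTER);
}
```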
5 changes: 2 additions & 3 deletions src/storage/delta.rs
@@ -19,7 +19,7 @@ async fn safe_upto_bound<S: LayerStore>(
.retrieve_layer_stack_names_upto(layer.name(), upto)
.await?;

let mut l = &*layer;
let mut l = layer;
loop {
let parent = match l.immediate_parent() {
None => {
@@ -487,8 +487,7 @@ mod tests {
let delta_layer: Arc<InternalLayer> = Arc::new(
BaseLayer::load_from_files([0, 0, 0, 0, 4], &delta_files)
.await
.unwrap()
.into(),
.unwrap(),
);

let expected: Vec<_> = layer
6 changes: 3 additions & 3 deletions src/storage/directory.rs
@@ -193,7 +193,7 @@ impl DirectoryLabelStore {
}

fn get_label_from_data(name: String, data: &[u8]) -> io::Result<Label> {
let s = String::from_utf8_lossy(&data);
let s = String::from_utf8_lossy(data);
let lines: Vec<&str> = s.lines().collect();
if lines.len() != 2 {
return Err(io::Error::new(
@@ -208,7 +208,7 @@ fn get_label_from_data(name: String, data: &[u8]) -> io::Result<Label> {
let version_str = &lines[0];
let layer_str = &lines[1];

let version = u64::from_str_radix(version_str, 10);
let version = version_str.parse::<u64>();
if version.is_err() {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
@@ -562,7 +562,7 @@ mod tests {

let map = file.map().await.unwrap();

assert_eq!(&vec![1, 2, 3][..], &map.as_ref()[..]);
assert_eq!(&vec![1, 2, 3][..], map.as_ref());
}

#[tokio::test]
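The directory.rs hunks cover two more lints: `needless_borrow` (`String::from_utf8_lossy(&data)` where `data` is already a `&[u8]`) and `from_str_radix_10` (base-10 `from_str_radix` is just `parse`). A sketch with made-up label contents:

```rust
fn main() {
    // Stand-in for the label file contents: a version line and a layer id line.
    let data: &[u8] = b"12\nabcdef\n";

    // After the `needless_borrow` fix: pass the slice directly instead of `&data`.
    let s = String::from_utf8_lossy(data);
    let version_str = s.lines().next().unwrap();

    // Before: `u64::from_str_radix(version_str, 10)`.
    // After: `parse`, which already means base 10 (clippy::from_str_radix_10).
    let version: Result<u64, _> = version_str.parse();
    assert_eq!(version, Ok(12));
}
```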