fix: all of the clippy lints
This, for the most part, just applies the fixes suggested by clippy, with a few
exceptions:

- The generated-types crates had additional allows added, since we can't
  control the code that gets generated
- Things that couldn't be fixed automatically were fixed by hand, in
  particular adding a Send bound where a returned Future needs to be Send

We also had to fix a build issue, caused by the dependency upgrade, by
enabling the compat feature of tokio-util. The workspace-hack crate was
updated accordingly.
mgattozzi committed Jan 9, 2024
1 parent eac59f5 commit 6f5006d
Showing 24 changed files with 60 additions and 58 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Generated file; diff not rendered.

7 changes: 6 additions & 1 deletion arrow_util/src/bitset.rs
@@ -281,7 +281,12 @@ mod tests {
     }
 
     fn iter_set_bools(bools: &[bool]) -> impl Iterator<Item = usize> + '_ {
-        bools.iter().enumerate().filter_map(|(x, y)| y.then(|| x))
+        bools
+            .iter()
+            .enumerate()
+            // Filter out all y that are not true and then return only x
+            .filter(|&(_, y)| *y)
+            .map(|(x, _)| x)
     }
 
     #[test]
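Note: the lint here is presumably clippy::unnecessary_lazy_evaluations, which flags y.then(|| x) because the closure is needless; bool::then_some would also satisfy it. A minimal sketch showing that the chosen filter/map chain and then_some agree (standalone example, not code from this repo):

    fn main() {
        let bools = [true, false, true, true];
        let via_then_some: Vec<usize> = bools
            .iter()
            .enumerate()
            .filter_map(|(x, y)| y.then_some(x)) // no closure, so no lint
            .collect();
        let via_filter_map: Vec<usize> = bools
            .iter()
            .enumerate()
            .filter(|&(_, y)| *y)
            .map(|(x, _)| x)
            .collect();
        assert_eq!(via_then_some, via_filter_map);
        assert_eq!(via_then_some, vec![0, 2, 3]);
    }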
5 changes: 1 addition & 4 deletions arrow_util/src/dictionary.rs
@@ -128,10 +128,7 @@ impl<K: AsPrimitive<usize> + FromPrimitive + Zero> StringDictionary<K> {
 }
 
 fn hash_str(hasher: &ahash::RandomState, value: &str) -> u64 {
-    use std::hash::{BuildHasher, Hash, Hasher};
-    let mut state = hasher.build_hasher();
-    value.hash(&mut state);
-    state.finish()
+    hasher.hash_one(value)
 }
 
 impl StringDictionary<i32> {
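Note: BuildHasher::hash_one (stable since Rust 1.71) wraps exactly the build_hasher/hash/finish sequence deleted above. A minimal sketch, assuming ahash as a dependency:

    use std::hash::BuildHasher;

    fn main() {
        let hasher = ahash::RandomState::new();
        // hash_one builds a fresh Hasher, feeds it the value, and finishes it
        let h1 = hasher.hash_one("value");
        let h2 = hasher.hash_one("value");
        assert_eq!(h1, h2); // same RandomState + same input => same hash
    }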
3 changes: 3 additions & 0 deletions cache_system/src/addressable_heap.rs
@@ -478,9 +478,12 @@ mod tests {
         }
 
         fn peek(&self) -> Option<(&u8, &String, &i8)> {
+            #[allow(clippy::map_identity)]
             self.inner
                 .iter()
                 .min_by_key(|(k, _v, o)| (o, k))
+                // This is a false positive as this actually changes
+                // Option<&(u8, String, i8)> -> Option<(&u8, &String, &i8)>
                 .map(|(k, v, o)| (k, v, o))
         }
 
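Note: the false positive is easier to see in isolation; iterating a slice of tuples yields references to whole tuples, and the final map destructures each into a tuple of references. A minimal sketch with a hypothetical inner type mirroring the code above:

    fn peek(items: &[(u8, String, i8)]) -> Option<(&u8, &String, &i8)> {
        #[allow(clippy::map_identity)]
        items
            .iter()
            .min_by_key(|(k, _v, o)| (o, k))
            // not an identity: &(u8, String, i8) -> (&u8, &String, &i8)
            .map(|(k, v, o)| (k, v, o))
    }

    fn main() {
        let items = vec![(2, "b".to_string(), 1), (1, "a".to_string(), 0)];
        assert_eq!(peek(&items), Some((&1, &"a".to_string(), &0)));
    }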
2 changes: 1 addition & 1 deletion client_util/src/connection.rs
@@ -98,7 +98,7 @@ impl From<tonic::transport::Error> for Error {
         let details = source
             .source()
             .map(|e| format!(" ({e})"))
-            .unwrap_or_else(|| "".to_string());
+            .unwrap_or_default();
 
         Self::TransportError { source, details }
     }
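Note: when the fallback is the type's Default value (here an empty String), clippy prefers unwrap_or_default over a closure. A minimal sketch:

    fn main() {
        let missing: Option<i32> = None;
        // equivalent results; the second form is what clippy suggests
        let a = missing.map(|e| format!(" ({e})")).unwrap_or_else(|| "".to_string());
        let b = missing.map(|e| format!(" ({e})")).unwrap_or_default();
        assert_eq!(a, b);
    }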
2 changes: 1 addition & 1 deletion data_types/src/lib.rs
@@ -1521,7 +1521,7 @@ impl TableSummary {
     pub fn total_count(&self) -> u64 {
         // Assumes that all tables have the same number of rows, so
         // pick the first one
-        let count = self.columns.get(0).map(|c| c.total_count()).unwrap_or(0);
+        let count = self.columns.first().map(|c| c.total_count()).unwrap_or(0);
 
         // Validate that the counts are consistent across columns
         for c in &self.columns {
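Note: this and the many similar hunks below are clippy::get_first; .get(0) and .first() are equivalent on slices, and .first() states the intent directly. A minimal sketch:

    fn main() {
        let columns: Vec<u64> = vec![10, 10, 10];
        // the left-hand form is the one the lint rewrites
        assert_eq!(columns.get(0), columns.first()); // both Some(&10)
        let empty: Vec<u64> = vec![];
        assert_eq!(empty.first().map(|c| *c).unwrap_or(0), 0);
    }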
6 changes: 5 additions & 1 deletion generated_types/src/lib.rs
@@ -2,7 +2,11 @@
 // crates because of all the generated code it contains that we don't have much
 // control over.
 #![deny(rustdoc::broken_intra_doc_links, rustdoc::bare_urls)]
-#![allow(clippy::derive_partial_eq_without_eq, clippy::needless_borrow)]
+#![allow(
+    clippy::derive_partial_eq_without_eq,
+    clippy::needless_borrow,
+    clippy::needless_borrows_for_generic_args
+)]
 #![warn(unused_crate_dependencies)]
 
 // Workaround for "unused crate" lint false positives.
2 changes: 1 addition & 1 deletion import_export/Cargo.toml
@@ -19,5 +19,5 @@ schema = { path = "../schema" }
 serde_json = "1.0.107"
 thiserror = "1.0.48"
 tokio = { version = "1.32" }
-tokio-util = { version = "0.7.9" }
+tokio-util = { version = "0.7.9", features = ["compat"] }
 workspace-hack = { version = "0.1", path = "../workspace-hack" }
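Note: tokio-util's compat feature provides adapters between the futures-io traits and tokio's own I/O traits; this is the build fix mentioned in the commit message. A minimal sketch of the bridging it enables, assuming the futures crate is also available (not code from this repo):

    use tokio_util::compat::FuturesAsyncReadCompatExt;

    #[tokio::main]
    async fn main() {
        // a futures-io AsyncRead...
        let reader = futures::io::Cursor::new(b"hello".to_vec());
        // ...adapted so it implements tokio::io::AsyncRead
        let mut reader = reader.compat();
        let mut buf = Vec::new();
        tokio::io::AsyncReadExt::read_to_end(&mut reader, &mut buf)
            .await
            .unwrap();
        assert_eq!(buf, b"hello");
    }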
3 changes: 1 addition & 2 deletions import_export/src/file/export.rs
@@ -71,8 +71,7 @@ impl RemoteExporter {
         // Export the metadata for the table. Since all
         // parquet_files are part of the same table, use the table_id
         // from the first parquet_file
-        let table_id = parquet_files
-            .get(0)
+        let table_id = parquet_files.first()
             .map(|parquet_file| parquet_file.table_id);
         if let Some(table_id) = table_id {
             self.export_table_metadata(&output_directory, table_id)
22 changes: 9 additions & 13 deletions influxdb3_server/src/lib.rs
@@ -179,12 +179,8 @@ mod tests {
         let addr = get_free_port();
         let trace_header_parser = trace_http::ctx::TraceHeaderParser::new();
         let metrics = Arc::new(metric::Registry::new());
-        let common_state = crate::CommonServerState::new(
-            Arc::clone(&metrics),
-            None,
-            trace_header_parser,
-            addr.clone(),
-        );
+        let common_state =
+            crate::CommonServerState::new(Arc::clone(&metrics), None, trace_header_parser, addr);
         let catalog = Arc::new(influxdb3_write::catalog::Catalog::new());
         let object_store: Arc<DynObjectStore> = Arc::new(object_store::memory::InMemory::new());
         let parquet_store =
@@ -240,7 +236,7 @@
             "| a | 1970-01-01T00:00:00.000000123 | 1 |",
             "+------+-------------------------------+-----+",
         ];
-        let actual: Vec<_> = body.split("\n").into_iter().collect();
+        let actual: Vec<_> = body.split('\n').collect();
         assert_eq!(
             expected, actual,
             "\n\nexpected:\n\n{:#?}\nactual:\n\n{:#?}\n\n",
@@ -251,9 +247,9 @@
     }
 
     pub(crate) async fn write_lp(
-        server: impl Into<String>,
-        database: impl Into<String>,
-        lp: impl Into<String>,
+        server: impl Into<String> + Send,
+        database: impl Into<String> + Send,
+        lp: impl Into<String> + Send,
         authorization: Option<&str>,
     ) -> Response<Body> {
         let server = server.into();
@@ -276,9 +272,9 @@
     }
 
    pub(crate) async fn query(
-        server: impl Into<String>,
-        database: impl Into<String>,
-        query: impl Into<String>,
+        server: impl Into<String> + Send,
+        database: impl Into<String> + Send,
+        query: impl Into<String> + Send,
        authorization: Option<&str>,
    ) -> Response<Body> {
        let client = Client::new();
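Note: these are the manual Send fixes called out in the commit message, presumably for clippy's future_not_send lint. An async fn's future captures its arguments, so with an unconstrained impl Into<String> parameter the future cannot be proven Send for every possible argument type; the + Send bound restores that guarantee for all callers. A minimal sketch of the pattern (hypothetical function, not this crate's API):

    async fn write_lp(server: impl Into<String> + Send) -> String {
        server.into()
    }

    fn assert_send<T: Send>(_: &T) {}

    #[tokio::main]
    async fn main() {
        let fut = write_lp("http://localhost:8181");
        // with the bound, every permissible argument is Send, so the
        // returned future can always be handed to a work-stealing runtime
        assert_send(&fut);
        tokio::spawn(fut).await.unwrap();
    }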
2 changes: 1 addition & 1 deletion influxrpc_parser/src/predicate.rs
@@ -98,7 +98,7 @@ fn build_node(expr: &Expr, strings_are_regex: bool) -> Result<RPCNode> {
         ),
         Expr::Cast { expr, data_type } => match data_type {
             sqlparser::ast::DataType::Custom(ident, _modifiers) => {
-                if let Some(Ident { value, .. }) = ident.0.get(0) {
+                if let Some(Ident { value, .. }) = ident.0.first() {
                     // See https://docs.influxdata.com/influxdb/v1.8/query_language/explore-data/#syntax
                     match value.as_str() {
                         "field" => {
6 changes: 5 additions & 1 deletion ingester_query_grpc/src/lib.rs
@@ -13,7 +13,11 @@
     missing_debug_implementations,
     unused_crate_dependencies
 )]
-#![allow(clippy::derive_partial_eq_without_eq, clippy::needless_borrow)]
+#![allow(
+    clippy::derive_partial_eq_without_eq,
+    clippy::needless_borrow,
+    clippy::needless_borrows_for_generic_args
+)]
 
 // Workaround for "unused crate" lint false positives.
 use workspace_hack as _;
9 changes: 3 additions & 6 deletions iox_data_generator/src/tag_set.rs
@@ -341,7 +341,7 @@ impl GeneratedTagSets {
         let parent_has_ones = self
             .has_one_values
             .entry(parent_has_one_key.as_str().to_owned())
-            .or_insert_with(ParentToHasOnes::default);
+            .or_default();
 
         let has_one_values = self.values.get(has_one.as_str()).expect(
             "add_has_ones should never be called before the values collection is created",
@@ -354,10 +354,7 @@
                 ones_iter.next().unwrap()
             });
 
-            let has_one_map = parent_has_ones
-                .id_to_has_ones
-                .entry(parent.id)
-                .or_insert_with(BTreeMap::new);
+            let has_one_map = parent_has_ones.id_to_has_ones.entry(parent.id).or_default();
             has_one_map.insert(Arc::clone(&parent_has_one_key), Arc::clone(one_val));
         }
     }
@@ -414,7 +411,7 @@
             let child_vals = self
                 .child_values
                 .entry(child_values_key(belongs_to, &spec.name))
-                .or_insert_with(BTreeMap::new);
+                .or_default();
             child_vals.insert(parent.id, parent_owned);
         }
         self.values.insert(spec.name.to_string(), all_children);
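Note: clippy suggests or_default whenever the or_insert_with argument is just the value type's Default constructor, as in the three hunks above. A minimal sketch:

    use std::collections::BTreeMap;

    fn main() {
        let mut map: BTreeMap<String, Vec<u8>> = BTreeMap::new();
        // before: map.entry("a".to_string()).or_insert_with(Vec::new).push(1);
        map.entry("a".to_string()).or_default().push(1);
        map.entry("a".to_string()).or_default().push(2);
        assert_eq!(map["a"], vec![1, 2]);
    }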
1 change: 1 addition & 0 deletions iox_query/src/exec/seriesset/converter.rs
@@ -625,6 +625,7 @@ impl PartialEq for SortableSeries {
 impl Eq for SortableSeries {}
 
 impl PartialOrd for SortableSeries {
+    #[allow(clippy::non_canonical_partial_ord_impl)]
     fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
         self.tag_vals.partial_cmp(&other.tag_vals)
     }
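Note: clippy::non_canonical_partial_ord_impl fires when a type implements both Ord and PartialOrd but partial_cmp is not a plain delegation to cmp, since hand-written bodies can silently disagree with Ord. The canonical shape the lint expects looks like this (generic sketch, not this type's real ordering):

    use std::cmp::Ordering;

    #[derive(PartialEq, Eq)]
    struct Series {
        tag_vals: Vec<String>,
    }

    impl Ord for Series {
        fn cmp(&self, other: &Self) -> Ordering {
            self.tag_vals.cmp(&other.tag_vals)
        }
    }

    impl PartialOrd for Series {
        // delegate to Ord so the two impls can never disagree
        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
            Some(self.cmp(other))
        }
    }

The allow keeps SortableSeries' existing hand-written body, whose equivalence to its Ord impl the lint cannot verify.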
2 changes: 1 addition & 1 deletion iox_query/src/logical_optimizer/handle_gapfill.rs
@@ -620,7 +620,7 @@ mod test {
     fn optimize(plan: &LogicalPlan) -> Result<Option<LogicalPlan>> {
         let optimizer = Optimizer::with_rules(vec![Arc::new(HandleGapFill)]);
         optimizer.optimize_recursively(
-            optimizer.rules.get(0).unwrap(),
+            optimizer.rules.first().unwrap(),
             plan,
             &OptimizerContext::new(),
         )
2 changes: 1 addition & 1 deletion iox_query/src/physical_optimizer/dedup/dedup_sort_order.rs
@@ -89,7 +89,7 @@ impl PhysicalOptimizerRule for DedupSortOrder {
             .iter()
             .filter(|sort_key| {
                 match sort_key.get_index_of(col) {
-                    Some(idx) if idx == 0 => {
+                    Some(0) => {
                         // Column next in sort order from this chunks PoV. This is good.
                         true
                     }
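Note: this one is presumably clippy::redundant_guards — a guard that only compares a binding against a literal can become the literal pattern itself. A minimal sketch:

    fn is_next_in_sort_order(index: Option<usize>) -> bool {
        match index {
            // before: Some(idx) if idx == 0 => true,
            Some(0) => true,
            _ => false,
        }
    }

    fn main() {
        assert!(is_next_in_sort_order(Some(0)));
        assert!(!is_next_in_sort_order(Some(3)));
        assert!(!is_next_in_sort_order(None));
    }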
4 changes: 1 addition & 3 deletions iox_query/src/test.rs
@@ -81,9 +81,7 @@ impl TestDatabase {
     /// Add a test chunk to the database
     pub fn add_chunk(&self, partition_key: &str, chunk: Arc<TestChunk>) -> &Self {
         let mut partitions = self.partitions.lock();
-        let chunks = partitions
-            .entry(partition_key.to_string())
-            .or_insert_with(BTreeMap::new);
+        let chunks = partitions.entry(partition_key.to_string()).or_default();
         chunks.insert(chunk.id(), chunk);
         self
     }
11 changes: 4 additions & 7 deletions iox_query_influxql/src/plan/expr_type_evaluator.rs
@@ -294,8 +294,7 @@ impl<'a> TypeEvaluator<'a> {
             // These functions require a single numeric as input and return a float
             name @ ("sin" | "cos" | "tan" | "atan" | "exp" | "log" | "ln" | "log2" | "log10"
             | "sqrt") => {
-                match arg_types
-                    .get(0)
+                match arg_types.first()
                     .ok_or_else(|| error::map::query(format!("{name} expects 1 argument")))?
                 {
                     Some(
@@ -310,8 +309,8 @@
 
             // These functions require a single float as input and return a float
             name @ ("asin" | "acos") => {
-                match arg_types
-                    .get(0)
+                match arg_types.first()
                     .ok_or_else(|| error::map::query(format!("{name} expects 1 argument")))?
                 {
                     Some(VarRefDataType::Float) | None => Ok(Some(VarRefDataType::Float)),
@@ -324,7 +322,7 @@
 
             // These functions require two numeric arguments and return a float
             name @ ("atan2" | "pow") => {
-                let (Some(arg0), Some(arg1)) = (arg_types.get(0), arg_types.get(1)) else {
+                let (Some(arg0), Some(arg1)) = (arg_types.first(), arg_types.get(1)) else {
                     return error::query(format!("{name} expects 2 arguments"));
                 };
 
@@ -347,8 +345,7 @@
 
             // These functions return the same data type as their input
             name @ ("abs" | "floor" | "ceil" | "round") => {
-                match arg_types
-                    .get(0)
+                match arg_types.first()
                     .cloned()
                     .ok_or_else(|| error::map::query(format!("{name} expects 1 argument")))?
                 {
12 changes: 6 additions & 6 deletions iox_query_influxql/src/plan/planner/select.rs
@@ -288,7 +288,7 @@ impl<'a> Selector<'a> {
                 ));
             }
             Ok(Self::First {
-                field_key: Self::identifier(call.args.get(0).unwrap())?,
+                field_key: Self::identifier(call.args.first().unwrap())?,
             })
         }
 
@@ -300,7 +300,7 @@
                 ));
             }
             Ok(Self::Last {
-                field_key: Self::identifier(call.args.get(0).unwrap())?,
+                field_key: Self::identifier(call.args.first().unwrap())?,
             })
         }
 
@@ -312,7 +312,7 @@
                 ));
             }
             Ok(Self::Max {
-                field_key: Self::identifier(call.args.get(0).unwrap())?,
+                field_key: Self::identifier(call.args.first().unwrap())?,
             })
         }
 
@@ -324,7 +324,7 @@
                 ));
             }
             Ok(Self::Min {
-                field_key: Self::identifier(call.args.get(0).unwrap())?,
+                field_key: Self::identifier(call.args.first().unwrap())?,
             })
         }
 
@@ -336,7 +336,7 @@
                 ));
             }
             Ok(Self::Percentile {
-                field_key: Self::identifier(call.args.get(0).unwrap())?,
+                field_key: Self::identifier(call.args.first().unwrap())?,
                 n: Self::literal_num(call.args.get(1).unwrap())?,
             })
         }
@@ -349,7 +349,7 @@
                 ));
             }
             Ok(Self::Sample {
-                field_key: Self::identifier(call.args.get(0).unwrap())?,
+                field_key: Self::identifier(call.args.first().unwrap())?,
                 n: Self::literal_int(call.args.get(1).unwrap())?,
             })
         }
2 changes: 1 addition & 1 deletion ioxd_common/src/server_type.rs
@@ -30,7 +30,7 @@ impl From<tonic::transport::Error> for RpcError {
         let details = source
             .source()
             .map(|e| format!(" ({e})"))
-            .unwrap_or_else(|| "".to_string());
+            .unwrap_or_default();
 
         Self::TransportError { source, details }
     }
4 changes: 2 additions & 2 deletions schema/src/sort.rs
@@ -404,8 +404,8 @@ pub fn adjust_sort_key_columns(
     let existing_columns_without_time = catalog_sort_key
         .iter()
         .map(|(col, _opts)| col)
-        .cloned()
-        .filter(|col| TIME_COLUMN_NAME != col.as_ref());
+        .filter(|&col| TIME_COLUMN_NAME != col.as_ref())
+        .cloned();
     let new_columns: Vec<_> = primary_key
         .iter()
         .filter(|col| !catalog_sort_key.contains(col))
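Note: moving the filter ahead of cloned (presumably clippy::iter_overeager_cloned) means only the items that survive the filter get cloned. A minimal sketch of the same reordering, using String columns rather than this function's real key type:

    fn main() {
        let cols = vec!["host".to_string(), "region".to_string(), "time".to_string()];
        // cloning after the filter avoids cloning "time" just to discard it
        let kept: Vec<String> = cols
            .iter()
            .filter(|&col| col.as_str() != "time")
            .cloned()
            .collect();
        assert_eq!(kept, vec!["host".to_string(), "region".to_string()]);
    }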
4 changes: 2 additions & 2 deletions service_grpc_flight/src/keep_alive.rs
@@ -375,7 +375,7 @@ mod tests {
         } else {
             s
         };
-        let s = panic_on_stream_timeout(s, Duration::from_millis(250));
-        s
+
+        (panic_on_stream_timeout(s, Duration::from_millis(250))) as _
     }
 }
4 changes: 2 additions & 2 deletions wal/src/lib.rs
@@ -863,7 +863,7 @@ mod tests {
         assert_eq!(wal_entries.len(), 2);
         let write_op_entries = wal_entries.into_iter().flatten().collect::<Vec<_>>();
         assert_eq!(write_op_entries.len(), 3);
-        assert_matches!(write_op_entries.get(0), Some(got_op1) => {
+        assert_matches!(write_op_entries.first(), Some(got_op1) => {
             assert_op_shape(got_op1, &w1);
         });
         assert_matches!(write_op_entries.get(1), Some(got_op2) => {
@@ -916,7 +916,7 @@
         // error is thrown
         assert_matches!(decoder.next(), Some(Ok(batch)) => {
             assert_eq!(batch.len(), 1);
-            assert_op_shape(batch.get(0).unwrap(), &good_write);
+            assert_op_shape(batch.first().unwrap(), &good_write);
         });
         assert_matches!(
             decoder.next(),
2 changes: 1 addition & 1 deletion workspace-hack/Cargo.toml
@@ -83,7 +83,7 @@ strum = { version = "0.25", features = ["derive"] }
 thrift = { version = "0.17" }
 tokio = { version = "1", features = ["full", "tracing"] }
 tokio-stream = { version = "0.1", features = ["fs", "net"] }
-tokio-util = { version = "0.7", features = ["codec", "io"] }
+tokio-util = { version = "0.7", features = ["codec", "compat", "io"] }
 tonic = { version = "0.9", features = ["tls-webpki-roots"] }
 tower = { version = "0.4", features = ["balance", "buffer", "limit", "timeout", "util"] }
 tracing = { version = "0.1", features = ["log", "max_level_trace", "release_max_level_trace"] }
