diff --git a/.github/deploy_manylinux.sh b/.github/deploy_manylinux.sh index 941543d9e754..993f4b39f2f5 100644 --- a/.github/deploy_manylinux.sh +++ b/.github/deploy_manylinux.sh @@ -8,7 +8,7 @@ ls -la rm py-polars/README.md cp README.md py-polars/README.md cd py-polars -rustup override set nightly-2023-06-23 +rustup override set nightly-2023-07-27 export RUSTFLAGS='-C target-feature=+fxsr,+sse,+sse2,+sse3,+ssse3,+sse4.1,+sse4.2,+popcnt,+avx,+fma' # first the default release diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml index df75c7aa41ea..9eea7d2f3610 100644 --- a/.github/workflows/release-python.yml +++ b/.github/workflows/release-python.yml @@ -6,7 +6,7 @@ on: - py-* env: - RUST_TOOLCHAIN: nightly-2023-06-23 + RUST_TOOLCHAIN: nightly-2023-07-27 PYTHON_VERSION: '3.8' MATURIN_VERSION: '1.1.0' MATURIN_PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }} diff --git a/examples/python_rust_compiled_function/src/lib.rs b/examples/python_rust_compiled_function/src/lib.rs index a51941903a87..7e67a9c29fe1 100644 --- a/examples/python_rust_compiled_function/src/lib.rs +++ b/examples/python_rust_compiled_function/src/lib.rs @@ -18,7 +18,7 @@ fn hamming_distance(series_a: &PyAny, series_b: &PyAny) -> PyResult { fn hamming_distance_impl(a: &Series, b: &Series) -> PolarsResult { Ok(a.utf8()? .into_iter() - .zip(b.utf8()?.into_iter()) + .zip(b.utf8()?) .map(|(lhs, rhs)| hamming_distance_strs(lhs, rhs)) .collect()) } diff --git a/polars/polars-arrow/src/compute/take/fixed_size_list.rs b/polars/polars-arrow/src/compute/take/fixed_size_list.rs index 87d86345f48e..a29bb17fe976 100644 --- a/polars/polars-arrow/src/compute/take/fixed_size_list.rs +++ b/polars/polars-arrow/src/compute/take/fixed_size_list.rs @@ -15,7 +15,9 @@ pub unsafe fn take_unchecked(values: &FixedSizeListArray, indices: &IdxArr) -> F ) { let idx = indices.values().as_slice(); let child_values = values.values(); - let DataType::FixedSizeList(_, width) = values.data_type() else {unreachable!()}; + let DataType::FixedSizeList(_, width) = values.data_type() else { + unreachable!() + }; with_match_primitive_type!(primitive, |$T| { let arr: &PrimitiveArray<$T> = child_values.as_any().downcast_ref().unwrap(); diff --git a/polars/polars-arrow/src/floats/ord.rs b/polars/polars-arrow/src/floats/ord.rs index 52d71bc3382e..0a729eea65a5 100644 --- a/polars/polars-arrow/src/floats/ord.rs +++ b/polars/polars-arrow/src/floats/ord.rs @@ -12,6 +12,7 @@ use crate::kernels::rolling::compare_fn_nan_max; #[repr(transparent)] pub struct OrdFloat(T); +#[allow(clippy::incorrect_partial_ord_impl_on_ord_type)] impl PartialOrd for OrdFloat { fn partial_cmp(&self, other: &Self) -> Option { Some(compare_fn_nan_max(&self.0, &other.0)) diff --git a/polars/polars-arrow/src/kernels/agg_mean.rs b/polars/polars-arrow/src/kernels/agg_mean.rs index 26b04e557667..8b451774a943 100644 --- a/polars/polars-arrow/src/kernels/agg_mean.rs +++ b/polars/polars-arrow/src/kernels/agg_mean.rs @@ -1,4 +1,7 @@ -use std::simd::{Mask, Simd, SimdCast, SimdElement, SimdFloat, StdFloat, ToBitMask}; +use std::simd::{ + LaneCount, Mask, Simd, SimdCast, SimdElement, SimdFloat, SimdInt, SimdUint, StdFloat, + SupportedLaneCount, ToBitMask, +}; use arrow::array::{Array, PrimitiveArray}; use arrow::bitmap::utils::{BitChunkIterExact, BitChunksExact}; @@ -11,10 +14,43 @@ use num_traits::ToPrimitive; use crate::data_types::IsFloat; use crate::utils::with_match_primitive_type; +// TODO! 
try to remove this if we can cast again directly +pub trait SimdCastPl +where + LaneCount: SupportedLaneCount, +{ + fn cast_custom(self) -> Simd; +} + +macro_rules! impl_cast_custom { + ($_type:ty) => { + impl SimdCastPl for Simd<$_type, N> + where + LaneCount: SupportedLaneCount, + { + fn cast_custom(self) -> Simd { + self.cast::() + } + } + }; +} + +impl_cast_custom!(u8); +impl_cast_custom!(u16); +impl_cast_custom!(u32); +impl_cast_custom!(u64); +impl_cast_custom!(i8); +impl_cast_custom!(i16); +impl_cast_custom!(i32); +impl_cast_custom!(i64); +impl_cast_custom!(f32); +impl_cast_custom!(f64); + #[multiversion(targets = "simd")] fn nonnull_sum_as_f64(values: &[T]) -> f64 where T: NativeType + SimdElement + ToPrimitive + SimdCast, + Simd: SimdCastPl<8>, { // we choose 8 as that the maximum size of f64x8 -> 512bit wide const LANES: usize = 8; @@ -22,7 +58,7 @@ where let mut reduced: Simd = Simd::splat(0.0); for chunk in simd_vals { - reduced += chunk.cast::(); + reduced += chunk.cast_custom::(); } unsafe { @@ -43,6 +79,7 @@ fn null_sum_as_f64_impl(values: &[T], mut validity_masks: I) -> f64 where T: NativeType + SimdElement + ToPrimitive + IsFloat + SimdCast, I: BitChunkIterExact, + Simd: SimdCastPl<8>, { const LANES: usize = 8; let mut chunks = values.chunks_exact(LANES); @@ -54,7 +91,7 @@ where |acc, (chunk, validity_chunk)| { // safety: exact size chunks let chunk: [T; LANES] = unsafe { chunk.try_into().unwrap_unchecked() }; - let chunk = Simd::from(chunk).cast::(); + let chunk = Simd::from(chunk).cast_custom::(); // construct [bools] let mask = Mask::::from_bitmask(validity_chunk); @@ -107,6 +144,7 @@ where fn null_sum_as_f64(values: &[T], bitmap: &Bitmap) -> f64 where T: NativeType + SimdElement + ToPrimitive + IsFloat + SimdCast, + Simd: SimdCastPl<8>, { let (slice, offset, length) = bitmap.as_slice(); if offset == 0 { diff --git a/polars/polars-arrow/src/kernels/ewm/variance.rs b/polars/polars-arrow/src/kernels/ewm/variance.rs index 64d04094e326..fb7cae40c24b 100644 --- a/polars/polars-arrow/src/kernels/ewm/variance.rs +++ b/polars/polars-arrow/src/kernels/ewm/variance.rs @@ -37,7 +37,7 @@ where let res = xs .into_iter() - .zip(ys.into_iter()) + .zip(ys) .enumerate() .map(|(i, (opt_x, opt_y))| { let is_observation = opt_x.is_some() && opt_y.is_some(); diff --git a/polars/polars-core/src/chunked_array/builder/list/categorical.rs b/polars/polars-core/src/chunked_array/builder/list/categorical.rs index 7a4043a0bae9..fb46f7064ed5 100644 --- a/polars/polars-core/src/chunked_array/builder/list/categorical.rs +++ b/polars/polars-core/src/chunked_array/builder/list/categorical.rs @@ -14,7 +14,9 @@ impl ListCategoricalChunkedBuilder { ) -> Self { let inner = ListPrimitiveChunkedBuilder::new(name, capacity, values_capacity, logical_type.clone()); - let DataType::Categorical(Some(rev_map)) = logical_type else { panic!("expected categorical type") }; + let DataType::Categorical(Some(rev_map)) = logical_type else { + panic!("expected categorical type") + }; Self { inner, inner_dtype: RevMapMerger::new(rev_map), @@ -24,7 +26,9 @@ impl ListCategoricalChunkedBuilder { impl ListBuilderTrait for ListCategoricalChunkedBuilder { fn append_series(&mut self, s: &Series) -> PolarsResult<()> { - let DataType::Categorical(Some(rev_map)) = s.dtype() else { polars_bail!(ComputeError: "expected categorical type")}; + let DataType::Categorical(Some(rev_map)) = s.dtype() else { + polars_bail!(ComputeError: "expected categorical type") + }; self.inner_dtype.merge_map(rev_map)?; self.inner.append_series(s) } diff 
--git a/polars/polars-core/src/chunked_array/builder/list/dtypes.rs b/polars/polars-core/src/chunked_array/builder/list/dtypes.rs index 5ad2aec7cfb7..50b26d68a297 100644 --- a/polars/polars-core/src/chunked_array/builder/list/dtypes.rs +++ b/polars/polars-core/src/chunked_array/builder/list/dtypes.rs @@ -28,7 +28,9 @@ impl DtypeMerger { match self { #[cfg(feature = "dtype-categorical")] DtypeMerger::Categorical(merger) => { - let DataType::Categorical(Some(rev_map)) = dtype else { polars_bail!(ComputeError: "expected categorical rev-map") }; + let DataType::Categorical(Some(rev_map)) = dtype else { + polars_bail!(ComputeError: "expected categorical rev-map") + }; return merger.merge_map(rev_map); } DtypeMerger::Other(Some(set_dtype)) => { diff --git a/polars/polars-core/src/chunked_array/from.rs b/polars/polars-core/src/chunked_array/from.rs index c3b0938bc272..f56efa466516 100644 --- a/polars/polars-core/src/chunked_array/from.rs +++ b/polars/polars-core/src/chunked_array/from.rs @@ -1,6 +1,7 @@ use super::*; #[allow(clippy::ptr_arg)] +#[allow(clippy::needless_pass_by_ref_mut)] fn from_chunks_list_dtype(chunks: &mut Vec, dtype: DataType) -> DataType { // ensure we don't get List let dtype = if let Some(arr) = chunks.get(0) { diff --git a/polars/polars-core/src/chunked_array/list/iterator.rs b/polars/polars-core/src/chunked_array/list/iterator.rs index 1137f9e5f566..253727531269 100644 --- a/polars/polars-core/src/chunked_array/list/iterator.rs +++ b/polars/polars-core/src/chunked_array/list/iterator.rs @@ -232,10 +232,8 @@ mod test { builder.append_series(&Series::new("", &[1, 1])).unwrap(); let ca = builder.finish(); - ca.amortized_iter() - .zip(ca.into_iter()) - .for_each(|(s1, s2)| { - assert!(s1.unwrap().as_ref().series_equal(&s2.unwrap())); - }); + ca.amortized_iter().zip(&ca).for_each(|(s1, s2)| { + assert!(s1.unwrap().as_ref().series_equal(&s2.unwrap())); + }); } } diff --git a/polars/polars-core/src/chunked_array/logical/categorical/builder.rs b/polars/polars-core/src/chunked_array/logical/categorical/builder.rs index af6ebe4e3965..4656dd245d03 100644 --- a/polars/polars-core/src/chunked_array/logical/categorical/builder.rs +++ b/polars/polars-core/src/chunked_array/logical/categorical/builder.rs @@ -380,7 +380,7 @@ impl<'a> CategoricalChunkedBuilder<'a> { let cache = &mut crate::STRING_CACHE.lock_map(); id = cache.uuid; - for (s, h) in values.values_iter().zip(hashes.into_iter()) { + for (s, h) in values.values_iter().zip(hashes) { let global_idx = cache.insert_from_hash(h, s); // safety: // we allocated enough @@ -558,7 +558,7 @@ mod test { let mut builder1 = CategoricalChunkedBuilder::new("foo", 10); let mut builder2 = CategoricalChunkedBuilder::new("foo", 10); builder1.drain_iter(vec![None, Some("hello"), Some("vietnam")]); - builder2.drain_iter(vec![Some("hello"), None, Some("world")].into_iter()); + builder2.drain_iter(vec![Some("hello"), None, Some("world")]); let s = builder1.finish().into_series(); assert_eq!(s.str_value(0).unwrap(), "null"); diff --git a/polars/polars-core/src/chunked_array/logical/categorical/merge.rs b/polars/polars-core/src/chunked_array/logical/categorical/merge.rs index 6612d0979de9..7e526168f051 100644 --- a/polars/polars-core/src/chunked_array/logical/categorical/merge.rs +++ b/polars/polars-core/src/chunked_array/logical/categorical/merge.rs @@ -48,7 +48,9 @@ pub(crate) struct RevMapMerger { impl RevMapMerger { pub(crate) fn new(rev_map: Arc) -> Self { - let RevMapping::Global(_, _, id) = rev_map.as_ref() else { panic!("impl error") }; + let 
RevMapping::Global(_, _, id) = rev_map.as_ref() else { + panic!("impl error") + }; RevMapMerger { state: None, id: *id, @@ -57,7 +59,9 @@ impl RevMapMerger { } fn init_state(&mut self) { - let RevMapping::Global(map, slots, _) = self.original.as_ref() else { unreachable!() }; + let RevMapping::Global(map, slots, _) = self.original.as_ref() else { + unreachable!() + }; self.state = Some(State { map: (*map).clone(), slots: slots_to_mut(slots), @@ -70,7 +74,9 @@ impl RevMapMerger { if Arc::ptr_eq(&self.original, rev_map) { return Ok(()); } - let RevMapping::Global(map, slots, id) = rev_map.as_ref() else { polars_bail!(ComputeError: "expected global rev-map") }; + let RevMapping::Global(map, slots, id) = rev_map.as_ref() else { + polars_bail!(ComputeError: "expected global rev-map") + }; polars_ensure!(*id == self.id, ComputeError: "categoricals don't originate from the same string cache\n\ try setting a global string cache or increase the scope of the local string cache"); diff --git a/polars/polars-core/src/chunked_array/object/extension/drop.rs b/polars/polars-core/src/chunked_array/object/extension/drop.rs index 9d904462f96d..1f678f0a946f 100644 --- a/polars/polars-core/src/chunked_array/object/extension/drop.rs +++ b/polars/polars-core/src/chunked_array/object/extension/drop.rs @@ -2,7 +2,7 @@ use crate::chunked_array::object::extension::PolarsExtension; use crate::prelude::*; /// This will dereference a raw ptr when dropping the PolarsExtension, make sure that it's valid. -pub(crate) unsafe fn drop_list(ca: &mut ListChunked) { +pub(crate) unsafe fn drop_list(ca: &ListChunked) { let mut inner = ca.inner_dtype(); let mut nested_count = 0; diff --git a/polars/polars-core/src/chunked_array/ops/any_value.rs b/polars/polars-core/src/chunked_array/ops/any_value.rs index 5519abe04a16..ae5740f33ae4 100644 --- a/polars/polars-core/src/chunked_array/ops/any_value.rs +++ b/polars/polars-core/src/chunked_array/ops/any_value.rs @@ -134,7 +134,7 @@ impl<'a> AnyValue<'a> { // so we set the array pointer with values of the dictionary array. 
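Note: the `let ... else` hunks throughout this diff appear to be pure rustfmt reflow from the toolchain bump; the diverging `else` body now gets its own lines. A minimal, standalone sketch of the construct itself, using hypothetical std-only names rather than anything from this patch:

    use std::collections::BTreeMap;

    fn first_key(map: &BTreeMap<String, i64>) -> Result<&str, String> {
        // The else branch of `let ... else` must diverge (return, panic, break, ...).
        let Some((key, _value)) = map.iter().next() else {
            return Err("empty map".to_string());
        };
        Ok(key.as_str())
    }
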
#[cfg(feature = "dtype-categorical")] { - use polars_arrow::is_valid::{IsValid as _}; + use polars_arrow::is_valid::IsValid as _; if let Some(arr) = arr.as_any().downcast_ref::>() { let keys = arr.keys(); let values = arr.values(); @@ -144,14 +144,14 @@ impl<'a> AnyValue<'a> { if arr.is_valid_unchecked(idx) { let v = arr.value_unchecked(idx); - let DataType::Categorical(Some(rev_map)) = fld.data_type() else { + let DataType::Categorical(Some(rev_map)) = fld.data_type() + else { unimplemented!() }; AnyValue::Categorical(v, rev_map, SyncPtr::from_const(values)) } else { AnyValue::Null } - } else { arr_to_any_value(&**arr, idx, fld.data_type()) } diff --git a/polars/polars-core/src/chunked_array/ops/repeat_by.rs b/polars/polars-core/src/chunked_array/ops/repeat_by.rs index 261bf473a00b..8c58e264cc89 100644 --- a/polars/polars-core/src/chunked_array/ops/repeat_by.rs +++ b/polars/polars-core/src/chunked_array/ops/repeat_by.rs @@ -32,7 +32,7 @@ where } let iter = self .into_iter() - .zip(by.into_iter()) + .zip(by) .map(|(opt_v, opt_by)| opt_by.map(|by| std::iter::repeat(opt_v).take(by as usize))); // Safety: @@ -64,7 +64,7 @@ impl RepeatBy for BooleanChunked { let iter = self .into_iter() - .zip(by.into_iter()) + .zip(by) .map(|(opt_v, opt_by)| opt_by.map(|by| std::iter::repeat(opt_v).take(by as usize))); // Safety: @@ -93,7 +93,7 @@ impl RepeatBy for Utf8Chunked { let iter = self .into_iter() - .zip(by.into_iter()) + .zip(by) .map(|(opt_v, opt_by)| opt_by.map(|by| std::iter::repeat(opt_v).take(by as usize))); // Safety: @@ -124,7 +124,7 @@ impl RepeatBy for BinaryChunked { } let iter = self .into_iter() - .zip(by.into_iter()) + .zip(by) .map(|(opt_v, opt_by)| opt_by.map(|by| std::iter::repeat(opt_v).take(by as usize))); // Safety: diff --git a/polars/polars-core/src/chunked_array/ops/set.rs b/polars/polars-core/src/chunked_array/ops/set.rs index 9bbb92f47bcb..869604beaca4 100644 --- a/polars/polars-core/src/chunked_array/ops/set.rs +++ b/polars/polars-core/src/chunked_array/ops/set.rs @@ -55,7 +55,7 @@ where if self.chunks.len() == 1 { let arr = set_at_idx_no_null( self.downcast_iter().next().unwrap(), - idx.into_iter(), + idx, value, T::get_dtype().to_arrow(), )?; @@ -113,7 +113,7 @@ where // slow path, could be optimized. 
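The many `.zip(xs.into_iter())` → `.zip(xs)` edits above and below work because `Iterator::zip` is generic over `IntoIterator`, so the explicit `into_iter()` is redundant. A tiny standalone illustration (hypothetical values, not from this patch):

    fn main() {
        let xs = vec![1, 2, 3];
        let ys = vec![10, 20, 30];
        // `zip` accepts any `IntoIterator`; `&ys` yields `&i32` items directly.
        let pairs: Vec<(i32, i32)> = xs.iter().zip(&ys).map(|(&a, &b)| (a, b)).collect();
        assert_eq!(pairs, [(1, 10), (2, 20), (3, 30)]);
    }
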
let ca = mask .into_iter() - .zip(self.into_iter()) + .zip(self) .map(|(mask_val, opt_val)| match mask_val { Some(true) => value, _ => opt_val, @@ -166,7 +166,7 @@ impl<'a> ChunkSet<'a, bool, bool> for BooleanChunked { check_bounds!(self, mask); let ca = mask .into_iter() - .zip(self.into_iter()) + .zip(self) .map(|(mask_val, opt_val)| match mask_val { Some(true) => value, _ => opt_val, @@ -229,7 +229,7 @@ impl<'a> ChunkSet<'a, &'a str, String> for Utf8Chunked { check_bounds!(self, mask); let ca = mask .into_iter() - .zip(self.into_iter()) + .zip(self) .map(|(mask_val, opt_val)| match mask_val { Some(true) => value, _ => opt_val, @@ -293,7 +293,7 @@ impl<'a> ChunkSet<'a, &'a [u8], Vec> for BinaryChunked { check_bounds!(self, mask); let ca = mask .into_iter() - .zip(self.into_iter()) + .zip(self) .map(|(mask_val, opt_val)| match mask_val { Some(true) => value, _ => opt_val, diff --git a/polars/polars-core/src/chunked_array/ops/zip.rs b/polars/polars-core/src/chunked_array/ops/zip.rs index 19cb41ada692..ae91346f1beb 100644 --- a/polars/polars-core/src/chunked_array/ops/zip.rs +++ b/polars/polars-core/src/chunked_array/ops/zip.rs @@ -167,8 +167,8 @@ impl ChunkZip> for ObjectChunked { let mut ca: Self = left .as_ref() .into_iter() - .zip(right.into_iter()) - .zip(mask.into_iter()) + .zip(right.as_ref()) + .zip(mask.as_ref()) .map(|((left_c, right_c), mask_c)| match mask_c { Some(true) => left_c.cloned(), Some(false) => right_c.cloned(), diff --git a/polars/polars-core/src/frame/arithmetic.rs b/polars/polars-core/src/frame/arithmetic.rs index c0c2150e694a..4488640a0b45 100644 --- a/polars/polars-core/src/frame/arithmetic.rs +++ b/polars/polars-core/src/frame/arithmetic.rs @@ -8,12 +8,9 @@ use crate::utils::try_get_supertype; /// Get the supertype that is valid for all columns in the DataFrame. /// This reduces casting of the rhs in arithmetic. fn get_supertype_all(df: &DataFrame, rhs: &Series) -> PolarsResult { - df.columns - .iter() - .fold(Ok(rhs.dtype().clone()), |dt, s| match dt { - Ok(dt) => try_get_supertype(s.dtype(), &dt), - e => e, - }) + df.columns.iter().try_fold(rhs.dtype().clone(), |dt, s| { + try_get_supertype(s.dtype(), &dt) + }) } macro_rules! 
impl_arithmetic { diff --git a/polars/polars-core/src/frame/groupby/hashing.rs b/polars/polars-core/src/frame/groupby/hashing.rs index f73bf22ba99c..d145f285d129 100644 --- a/polars/polars-core/src/frame/groupby/hashing.rs +++ b/polars/polars-core/src/frame/groupby/hashing.rs @@ -68,7 +68,7 @@ fn finish_group_order(mut out: Vec>, sorted: bool) -> GroupsProxy { items }; out.sort_unstable_by_key(|g| g.0); - let mut idx = GroupsIdx::from_iter(out.into_iter()); + let mut idx = GroupsIdx::from_iter(out); idx.sorted = true; GroupsProxy::Idx(idx) } else { @@ -123,8 +123,7 @@ fn finish_group_order_vecs( // give the compiler some info // maybe it may elide some loop counters assert_eq!(first.len(), all.len()); - for (i, (first, all)) in first.into_iter().zip(all.into_iter()).enumerate() - { + for (i, (first, all)) in first.into_iter().zip(all).enumerate() { std::ptr::write(items_ptr.add(i), (first, all)) } } @@ -136,7 +135,7 @@ fn finish_group_order_vecs( // sort again items.sort_unstable_by_key(|g| g.0); - let mut idx = GroupsIdx::from_iter(items.into_iter()); + let mut idx = GroupsIdx::from_iter(items); idx.sorted = true; GroupsProxy::Idx(idx) } else { diff --git a/polars/polars-core/src/frame/groupby/mod.rs b/polars/polars-core/src/frame/groupby/mod.rs index 47a71cc7b99a..1617608cfc3a 100644 --- a/polars/polars-core/src/frame/groupby/mod.rs +++ b/polars/polars-core/src/frame/groupby/mod.rs @@ -782,7 +782,7 @@ impl<'df> GroupBy<'df> { let mut new_cols = Vec::with_capacity(self.selected_keys.len() + agg.len()); new_cols.extend_from_slice(&self.selected_keys); let cols = self.df.select_series(agg)?; - new_cols.extend(cols.into_iter()); + new_cols.extend(cols); Ok(DataFrame::new_no_checks(new_cols)) } } else { diff --git a/polars/polars-core/src/frame/groupby/perfect.rs b/polars/polars-core/src/frame/groupby/perfect.rs index abfdd8a765ac..31165cd887a3 100644 --- a/polars/polars-core/src/frame/groupby/perfect.rs +++ b/polars/polars-core/src/frame/groupby/perfect.rs @@ -190,7 +190,9 @@ where impl CategoricalChunked { // Use the indexes as perfect groups pub fn group_tuples_perfect(&self, multithreaded: bool, sorted: bool) -> GroupsProxy { - let DataType::Categorical(Some(rev_map)) = self.dtype() else { unreachable!()}; + let DataType::Categorical(Some(rev_map)) = self.dtype() else { + unreachable!() + }; if self.is_empty() { return GroupsProxy::Idx(GroupsIdx::new(vec![], vec![], true)); } diff --git a/polars/polars-core/src/frame/groupby/proxy.rs b/polars/polars-core/src/frame/groupby/proxy.rs index 3ddf64691c04..3f1a6eeced65 100644 --- a/polars/polars-core/src/frame/groupby/proxy.rs +++ b/polars/polars-core/src/frame/groupby/proxy.rs @@ -240,7 +240,7 @@ impl IntoIterator for GroupsIdx { fn into_iter(mut self) -> Self::IntoIter { let first = std::mem::take(&mut self.first); let all = std::mem::take(&mut self.all); - first.into_iter().zip(all.into_iter()) + first.into_iter().zip(all) } } diff --git a/polars/polars-core/src/frame/row/av_buffer.rs b/polars/polars-core/src/frame/row/av_buffer.rs index ffe67cd05511..84da014f4b23 100644 --- a/polars/polars-core/src/frame/row/av_buffer.rs +++ b/polars/polars-core/src/frame/row/av_buffer.rs @@ -358,51 +358,73 @@ impl<'a> AnyValueBufferTrusted<'a> { use AnyValueBufferTrusted::*; match self { Boolean(builder) => { - let AnyValue::Boolean(v) = val else { unreachable_unchecked_release!() }; + let AnyValue::Boolean(v) = val else { + unreachable_unchecked_release!() + }; builder.append_value(*v) } #[cfg(feature = "dtype-i8")] Int8(builder) => { - let 
AnyValue::Int8(v) = val else { unreachable_unchecked_release!() }; + let AnyValue::Int8(v) = val else { + unreachable_unchecked_release!() + }; builder.append_value(*v) } #[cfg(feature = "dtype-i16")] Int16(builder) => { - let AnyValue::Int16(v) = val else { unreachable_unchecked_release!() }; + let AnyValue::Int16(v) = val else { + unreachable_unchecked_release!() + }; builder.append_value(*v) } Int32(builder) => { - let AnyValue::Int32(v) = val else { unreachable_unchecked_release!() }; + let AnyValue::Int32(v) = val else { + unreachable_unchecked_release!() + }; builder.append_value(*v) } Int64(builder) => { - let AnyValue::Int64(v) = val else { unreachable_unchecked_release!() }; + let AnyValue::Int64(v) = val else { + unreachable_unchecked_release!() + }; builder.append_value(*v) } #[cfg(feature = "dtype-u8")] UInt8(builder) => { - let AnyValue::UInt8(v) = val else { unreachable_unchecked_release!() }; + let AnyValue::UInt8(v) = val else { + unreachable_unchecked_release!() + }; builder.append_value(*v) } #[cfg(feature = "dtype-u16")] UInt16(builder) => { - let AnyValue::UInt16(v) = val else { unreachable_unchecked_release!() }; + let AnyValue::UInt16(v) = val else { + unreachable_unchecked_release!() + }; builder.append_value(*v) } UInt32(builder) => { - let AnyValue::UInt32(v) = val else { unreachable_unchecked_release!() }; + let AnyValue::UInt32(v) = val else { + unreachable_unchecked_release!() + }; builder.append_value(*v) } UInt64(builder) => { - let AnyValue::UInt64(v) = val else { unreachable_unchecked_release!() }; + let AnyValue::UInt64(v) = val else { + unreachable_unchecked_release!() + }; builder.append_value(*v) } Float32(builder) => { - let AnyValue::Float32(v) = val else { unreachable_unchecked_release!() }; + let AnyValue::Float32(v) = val else { + unreachable_unchecked_release!() + }; builder.append_value(*v) } Float64(builder) => { - let AnyValue::Float64(v) = val else { unreachable_unchecked_release!() }; + let AnyValue::Float64(v) = val else { + unreachable_unchecked_release!() + }; builder.append_value(*v) } _ => { @@ -426,12 +448,16 @@ impl<'a> AnyValueBufferTrusted<'a> { _ => { match self { Utf8(builder) => { - let AnyValue::Utf8Owned(v) = val else { unreachable_unchecked_release!() }; + let AnyValue::Utf8Owned(v) = val else { + unreachable_unchecked_release!() + }; builder.append_value(v) } #[cfg(feature = "dtype-struct")] Struct(builders) => { - let AnyValue::StructOwned(payload) = val else { unreachable_unchecked_release!() }; + let AnyValue::StructOwned(payload) = val else { + unreachable_unchecked_release!() + }; let avs = &*payload.0; // amortize loop counter for i in 0..avs.len() { @@ -461,12 +487,16 @@ impl<'a> AnyValueBufferTrusted<'a> { _ => { match self { Utf8(builder) => { - let AnyValue::Utf8(v) = val else { unreachable_unchecked_release!() }; + let AnyValue::Utf8(v) = val else { + unreachable_unchecked_release!() + }; builder.append_value(v) } #[cfg(feature = "dtype-struct")] Struct(builders) => { - let AnyValue::Struct(idx, arr, fields) = val else { unreachable_unchecked_release!() }; + let AnyValue::Struct(idx, arr, fields) = val else { + unreachable_unchecked_release!() + }; let arrays = arr.values(); // amortize loop counter for i in 0..fields.len() { diff --git a/polars/polars-core/src/frame/top_k.rs b/polars/polars-core/src/frame/top_k.rs index fbcb8941f918..b72116821dc9 100644 --- a/polars/polars-core/src/frame/top_k.rs +++ b/polars/polars-core/src/frame/top_k.rs @@ -33,7 +33,7 @@ impl Ord for CompareRow<'_> { impl PartialOrd for 
CompareRow<'_> { fn partial_cmp(&self, other: &Self) -> Option { - self.bytes.partial_cmp(other.bytes) + Some(self.cmp(other)) } } diff --git a/polars/polars-core/src/serde/chunked_array.rs b/polars/polars-core/src/serde/chunked_array.rs index bdd6634826f1..24cd6064a0a8 100644 --- a/polars/polars-core/src/serde/chunked_array.rs +++ b/polars/polars-core/src/serde/chunked_array.rs @@ -38,7 +38,7 @@ where S: Serializer, { let iter: I = self.iter.borrow_mut().take().unwrap(); - serializer.collect_seq(iter.into_iter()) + serializer.collect_seq(iter) } } diff --git a/polars/polars-core/src/series/any_value.rs b/polars/polars-core/src/series/any_value.rs index d0d494650a4b..c9cc0bed2451 100644 --- a/polars/polars-core/src/series/any_value.rs +++ b/polars/polars-core/src/series/any_value.rs @@ -65,10 +65,8 @@ fn any_values_to_decimal( } let Some((s_min, s_max)) = scale_range else { // empty array or all nulls, return a decimal array with given scale (or 0 if inferring) - return Ok( - Int128Chunked::full_null("", avs.len()) - .into_decimal_unchecked(precision, scale.unwrap_or(0)) - ); + return Ok(Int128Chunked::full_null("", avs.len()) + .into_decimal_unchecked(precision, scale.unwrap_or(0))); }; let scale = scale.unwrap_or(s_max); if s_max > scale { diff --git a/polars/polars-core/src/series/from.rs b/polars/polars-core/src/series/from.rs index 6816a2c79bba..48cf0b5eaf02 100644 --- a/polars/polars-core/src/series/from.rs +++ b/polars/polars-core/src/series/from.rs @@ -603,7 +603,7 @@ fn to_physical_and_dtype(arrays: Vec) -> (Vec, DataType) { )) as ArrayRef; let polars_fields = _fields .iter() - .zip(dtypes.into_iter()) + .zip(dtypes) .map(|(field, dtype)| Field::new(&field.name, dtype)) .collect(); (vec![arrow_array], DataType::Struct(polars_fields)) diff --git a/polars/polars-io/src/csv/read_impl/batched_read.rs b/polars/polars-io/src/csv/read_impl/batched_read.rs index f919bafd191a..47bef7240ec7 100644 --- a/polars/polars-io/src/csv/read_impl/batched_read.rs +++ b/polars/polars-io/src/csv/read_impl/batched_read.rs @@ -192,7 +192,9 @@ impl<'a> CoreReader<'a> { pub fn batched_read(mut self, _has_cat: bool) -> PolarsResult> { let reader_bytes = self.reader_bytes.take().unwrap(); - let ReaderBytes::Mapped(bytes, mut file) = &reader_bytes else { unreachable!() }; + let ReaderBytes::Mapped(bytes, mut file) = &reader_bytes else { + unreachable!() + }; let (_, starting_point_offset) = self.find_starting_point(bytes, self.quote_char, self.eol_char)?; if let Some(starting_point_offset) = starting_point_offset { diff --git a/polars/polars-io/src/json/mod.rs b/polars/polars-io/src/json/mod.rs index 79a246d5637b..37055dedd2be 100644 --- a/polars/polars-io/src/json/mod.rs +++ b/polars/polars-io/src/json/mod.rs @@ -243,8 +243,8 @@ where let dtype = infer(&json_value)?; if let Some(overwrite) = self.schema_overwrite { let ArrowDataType::Struct(fields) = dtype else { - polars_bail!(ComputeError: "can only deserialize json objects") - }; + polars_bail!(ComputeError: "can only deserialize json objects") + }; let mut schema = Schema::from_iter(fields.iter()); overwrite_schema(&mut schema, overwrite)?; diff --git a/polars/polars-io/src/ndjson/buffer.rs b/polars/polars-io/src/ndjson/buffer.rs index 654190777ea5..c4bf434ce1d3 100644 --- a/polars/polars-io/src/ndjson/buffer.rs +++ b/polars/polars-io/src/ndjson/buffer.rs @@ -190,7 +190,7 @@ fn deserialize_all<'a>( Value::Array(arr) => { let Some(inner_dtype) = dtype.inner_dtype() else { if ignore_errors { - return Ok(AnyValue::Null) + return Ok(AnyValue::Null); } 
polars_bail!(ComputeError: "expected list/array in json value, got {}", dtype); }; diff --git a/polars/polars-io/src/parquet/read_impl.rs b/polars/polars-io/src/parquet/read_impl.rs index 683e48fe7b63..cac8d13a5805 100644 --- a/polars/polars-io/src/parquet/read_impl.rs +++ b/polars/polars-io/src/parquet/read_impl.rs @@ -324,7 +324,7 @@ pub fn read_parquet( }; Ok(arrow_schema_to_empty_df(&schema)) } else { - accumulate_dataframes_vertical(dfs.into_iter()) + accumulate_dataframes_vertical(dfs) } } diff --git a/polars/polars-lazy/polars-pipe/src/executors/operators/projection.rs b/polars/polars-lazy/polars-pipe/src/executors/operators/projection.rs index 28bf7eb9046b..efbd0d6b2953 100644 --- a/polars/polars-lazy/polars-pipe/src/executors/operators/projection.rs +++ b/polars/polars-lazy/polars-pipe/src/executors/operators/projection.rs @@ -59,7 +59,9 @@ impl Operator for ProjectionOperator { // add temporary cse column to the chunk let cse_owned_chunk; let chunk = if let Some(hstack) = &mut self.cse_exprs { - let OperatorResult::Finished(out) = hstack.execute(context, chunk)? else { unreachable!() }; + let OperatorResult::Finished(out) = hstack.execute(context, chunk)? else { + unreachable!() + }; cse_owned_chunk = out; &cse_owned_chunk } else { @@ -137,7 +139,9 @@ impl Operator for HstackOperator { let width = chunk.data.width(); let cse_owned_chunk; let chunk = if let Some(hstack) = &mut self.cse_exprs { - let OperatorResult::Finished(out) = hstack.execute(context, chunk)? else { unreachable!() }; + let OperatorResult::Finished(out) = hstack.execute(context, chunk)? else { + unreachable!() + }; cse_owned_chunk = out; &cse_owned_chunk } else { diff --git a/polars/polars-lazy/polars-pipe/src/executors/sinks/groupby/generic/global.rs b/polars/polars-lazy/polars-pipe/src/executors/sinks/groupby/generic/global.rs index f0d480b53c3d..a65e31c8b30d 100644 --- a/polars/polars-lazy/polars-pipe/src/executors/sinks/groupby/generic/global.rs +++ b/polars/polars-lazy/polars-pipe/src/executors/sinks/groupby/generic/global.rs @@ -174,7 +174,7 @@ impl GlobalTable { } } - pub(super) fn merge_local_map(&self, finalized_local_map: &mut AggHashTable) { + pub(super) fn merge_local_map(&self, finalized_local_map: &AggHashTable) { // TODO! maybe parallelize? // needs unsafe, first benchmark. 
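Several signatures in these hunks downgrade `&mut` parameters to `&` because the callee only reads the value; the bumped toolchain's `clippy::needless_pass_by_ref_mut` lint (allowed elsewhere in this diff where it misfires) flags such cases. A minimal sketch with hypothetical names, not taken from this patch:

    // Before: `fn total_len(parts: &mut [Vec<u32>]) -> usize` compiled, but the
    // exclusive borrow was never needed.
    fn total_len(parts: &[Vec<u32>]) -> usize {
        parts.iter().map(|p| p.len()).sum()
    }

    fn main() {
        let parts = vec![vec![1, 2], vec![3]];
        // A shared borrow is now enough at the call site.
        assert_eq!(total_len(&parts), 3);
    }
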
for (partition_i, pt_map) in self.inner_maps.iter().enumerate() { diff --git a/polars/polars-lazy/polars-pipe/src/executors/sinks/groupby/generic/hash_table.rs b/polars/polars-lazy/polars-pipe/src/executors/sinks/groupby/generic/hash_table.rs index 30088cfbd864..7a0afc446c2d 100644 --- a/polars/polars-lazy/polars-pipe/src/executors/sinks/groupby/generic/hash_table.rs +++ b/polars/polars-lazy/polars-pipe/src/executors/sinks/groupby/generic/hash_table.rs @@ -152,14 +152,14 @@ impl AggHashTable { false } - pub(super) fn combine(&mut self, other: &mut Self) { + pub(super) fn combine(&mut self, other: &Self) { self.combine_impl(other, |_hash| true) } pub(super) fn combine_on_partition( &mut self, partition: usize, - other: &mut AggHashTable, + other: &AggHashTable, ) { let partition = partition as u64; self.combine_impl(other, |hash| { @@ -169,7 +169,7 @@ impl AggHashTable { pub(super) fn combine_impl( &mut self, - other: &mut AggHashTable, + other: &AggHashTable, on_condition: C, ) // takes a hash and if true, this keys will be combined diff --git a/polars/polars-lazy/polars-pipe/src/executors/sinks/groupby/generic/thread_local.rs b/polars/polars-lazy/polars-pipe/src/executors/sinks/groupby/generic/thread_local.rs index 3abea87e4911..0a4f5c52b3c7 100644 --- a/polars/polars-lazy/polars-pipe/src/executors/sinks/groupby/generic/thread_local.rs +++ b/polars/polars-lazy/polars-pipe/src/executors/sinks/groupby/generic/thread_local.rs @@ -171,10 +171,7 @@ impl SpillPartitions { other.finish(); let other_payloads = std::mem::take(&mut other.finished_payloads); - for (part_self, part_other) in self - .finished_payloads - .iter_mut() - .zip(other_payloads.into_iter()) + for (part_self, part_other) in self.finished_payloads.iter_mut().zip(other_payloads) { part_self.extend(part_other) } @@ -214,7 +211,7 @@ impl SpillPartitions { }, ) }) - .chain(flattened.into_iter()) + .chain(flattened) } } @@ -280,7 +277,7 @@ impl ThreadLocalTable { } pub(super) fn combine(&mut self, other: &mut Self) { - self.inner_map.combine(&mut other.inner_map); + self.inner_map.combine(&other.inner_map); self.spill_partitions.combine(&mut other.spill_partitions); } diff --git a/polars/polars-lazy/polars-pipe/src/executors/sinks/joins/cross.rs b/polars/polars-lazy/polars-pipe/src/executors/sinks/joins/cross.rs index d3207379e818..3782b25b9903 100644 --- a/polars/polars-lazy/polars-pipe/src/executors/sinks/joins/cross.rs +++ b/polars/polars-lazy/polars-pipe/src/executors/sinks/joins/cross.rs @@ -37,7 +37,7 @@ impl Sink for CrossJoin { fn combine(&mut self, other: &mut dyn Sink) { let other = other.as_any().downcast_mut::().unwrap(); let other_chunks = std::mem::take(&mut other.chunks); - self.chunks.extend(other_chunks.into_iter()); + self.chunks.extend(other_chunks); } fn split(&self, _thread_no: usize) -> Box { diff --git a/polars/polars-lazy/polars-pipe/src/pipeline/convert.rs b/polars/polars-lazy/polars-pipe/src/pipeline/convert.rs index b2d0aaed03d9..05c3e784fc53 100644 --- a/polars/polars-lazy/polars-pipe/src/pipeline/convert.rs +++ b/polars/polars-lazy/polars-pipe/src/pipeline/convert.rs @@ -18,7 +18,7 @@ use crate::pipeline::PipeLine; fn exprs_to_physical( exprs: &[Node], - expr_arena: &mut Arena, + expr_arena: &Arena, to_physical: &F, schema: Option<&SchemaRef>, ) -> PolarsResult>> @@ -119,7 +119,7 @@ where pub fn get_sink( node: Node, - lp_arena: &mut Arena, + lp_arena: &Arena, expr_arena: &mut Arena, to_physical: &F, ) -> PolarsResult> @@ -398,7 +398,7 @@ pub fn get_dummy_operator() -> Box { fn get_hstack( exprs: 
&[Node], - expr_arena: &mut Arena, + expr_arena: &Arena, to_physical: &F, input_schema: SchemaRef, cse_exprs: Option>, @@ -417,8 +417,8 @@ where pub fn get_operator( node: Node, - lp_arena: &mut Arena, - expr_arena: &mut Arena, + lp_arena: &Arena, + expr_arena: &Arena, to_physical: &F, ) -> PolarsResult> where @@ -518,7 +518,7 @@ pub fn create_pipeline( operators: Vec>, operator_nodes: Vec, sink_nodes: Vec<(usize, Node, Rc>)>, - lp_arena: &mut Arena, + lp_arena: &Arena, expr_arena: &mut Arena, to_physical: F, verbose: bool, diff --git a/polars/polars-lazy/polars-plan/src/dsl/function_expr/cat.rs b/polars/polars-lazy/polars-plan/src/dsl/function_expr/cat.rs index 455ae39e8805..f2bc703b39fa 100644 --- a/polars/polars-lazy/polars-plan/src/dsl/function_expr/cat.rs +++ b/polars/polars-lazy/polars-plan/src/dsl/function_expr/cat.rs @@ -54,7 +54,9 @@ fn set_ordering(s: &Series, lexical: bool) -> PolarsResult { fn get_categories(s: &Series) -> PolarsResult { // categorical check let ca = s.categorical()?; - let DataType::Categorical(Some(rev_map)) = ca.dtype() else { unreachable!() }; + let DataType::Categorical(Some(rev_map)) = ca.dtype() else { + unreachable!() + }; let arr = rev_map.get_categories().clone().boxed(); Series::try_from((ca.name(), arr)) } diff --git a/polars/polars-lazy/polars-plan/src/dsl/function_expr/list.rs b/polars/polars-lazy/polars-plan/src/dsl/function_expr/list.rs index cd2bbbcae6a8..f42e745653d7 100644 --- a/polars/polars-lazy/polars-plan/src/dsl/function_expr/list.rs +++ b/polars/polars-lazy/polars-plan/src/dsl/function_expr/list.rs @@ -97,7 +97,7 @@ pub(super) fn slice(args: &mut [Series]) -> PolarsResult> { list_ca .amortized_iter() - .zip(length_ca.into_iter()) + .zip(length_ca) .map(|(opt_s, opt_length)| match (opt_s, opt_length) { (Some(s), Some(length)) => Some(s.as_ref().slice(offset, length as usize)), _ => None, @@ -134,8 +134,8 @@ pub(super) fn slice(args: &mut [Series]) -> PolarsResult> { list_ca .amortized_iter() - .zip(offset_ca.into_iter()) - .zip(length_ca.into_iter()) + .zip(offset_ca) + .zip(length_ca) .map( |((opt_s, opt_offset), opt_length)| match (opt_s, opt_offset, opt_length) { (Some(s), Some(offset), Some(length)) => { diff --git a/polars/polars-lazy/polars-plan/src/dsl/function_expr/mod.rs b/polars/polars-lazy/polars-plan/src/dsl/function_expr/mod.rs index 3dd74b58331f..5e59c5fa2bac 100644 --- a/polars/polars-lazy/polars-plan/src/dsl/function_expr/mod.rs +++ b/polars/polars-lazy/polars-plan/src/dsl/function_expr/mod.rs @@ -1,3 +1,4 @@ +#![allow(clippy::needless_pass_by_ref_mut)] // TODO: remove once false positives are fixed in rustc #[cfg(feature = "abs")] mod abs; #[cfg(feature = "arg_where")] diff --git a/polars/polars-lazy/polars-plan/src/dsl/function_expr/pow.rs b/polars/polars-lazy/polars-plan/src/dsl/function_expr/pow.rs index ef539525a7bc..27b97d9494bd 100644 --- a/polars/polars-lazy/polars-plan/src/dsl/function_expr/pow.rs +++ b/polars/polars-lazy/polars-plan/src/dsl/function_expr/pow.rs @@ -36,7 +36,7 @@ where if exponent.len() == 1 { let Some(exponent_value) = exponent.get(0) else { - return Ok(Some(Series::full_null(base.name(), base.len(), &dtype))) + return Ok(Some(Series::full_null(base.name(), base.len(), &dtype))); }; let s = match exponent_value.to_f64().unwrap() { a if a == 1.0 => base.clone().into_series(), @@ -65,7 +65,7 @@ where } else { Ok(Some( base.into_iter() - .zip(exponent.into_iter()) + .zip(exponent) .map(|(opt_base, opt_exponent)| match (opt_base, opt_exponent) { (Some(base), Some(exponent)) => 
Some(num::pow::Pow::pow(base, exponent)), _ => None, diff --git a/polars/polars-lazy/polars-plan/src/dsl/function_expr/range.rs b/polars/polars-lazy/polars-plan/src/dsl/function_expr/range.rs index 611e0fa8b5fe..19d1c75a62ad 100644 --- a/polars/polars-lazy/polars-plan/src/dsl/function_expr/range.rs +++ b/polars/polars-lazy/polars-plan/src/dsl/function_expr/range.rs @@ -136,7 +136,7 @@ pub(super) fn int_ranges(s: &[Series], step: i64) -> PolarsResult { DataType::Int64, ); - for (opt_start, opt_end) in start.into_iter().zip(end.into_iter()) { + for (opt_start, opt_end) in start.into_iter().zip(end) { match (opt_start, opt_end) { (Some(start_v), Some(end_v)) => match step { 1 => { diff --git a/polars/polars-lazy/polars-plan/src/dsl/function_expr/strings.rs b/polars/polars-lazy/polars-plan/src/dsl/function_expr/strings.rs index 60486b1cc073..57236d05ab96 100644 --- a/polars/polars-lazy/polars-plan/src/dsl/function_expr/strings.rs +++ b/polars/polars-lazy/polars-plan/src/dsl/function_expr/strings.rs @@ -187,8 +187,9 @@ pub(super) fn lengths(s: &Series) -> PolarsResult { #[cfg(feature = "regex")] pub(super) fn contains(s: &[Series], literal: bool, strict: bool) -> PolarsResult { - let ca = &s[0].utf8()?; - let pat = &s[1].utf8()?; + // TODO! move to polars-ops + let ca = s[0].utf8()?; + let pat = s[1].utf8()?; let mut out: BooleanChunked = match pat.len() { 1 => match pat.get(0) { @@ -204,7 +205,7 @@ pub(super) fn contains(s: &[Series], literal: bool, strict: bool) -> PolarsResul _ => { if literal { ca.into_iter() - .zip(pat.into_iter()) + .zip(pat) .map(|(opt_src, opt_val)| match (opt_src, opt_val) { (Some(src), Some(pat)) => src.contains(pat), _ => false, @@ -212,7 +213,7 @@ pub(super) fn contains(s: &[Series], literal: bool, strict: bool) -> PolarsResul .collect_trusted() } else if strict { ca.into_iter() - .zip(pat.into_iter()) + .zip(pat) .map(|(opt_src, opt_val)| match (opt_src, opt_val) { (Some(src), Some(pat)) => { let re = Regex::new(pat)?; @@ -223,7 +224,7 @@ pub(super) fn contains(s: &[Series], literal: bool, strict: bool) -> PolarsResul .collect::>()? 
} else { ca.into_iter() - .zip(pat.into_iter()) + .zip(pat) .map(|(opt_src, opt_val)| match (opt_src, opt_val) { (Some(src), Some(pat)) => Regex::new(pat).ok().map(|re| re.is_match(src)), _ => Some(false), @@ -238,8 +239,8 @@ pub(super) fn contains(s: &[Series], literal: bool, strict: bool) -> PolarsResul } pub(super) fn ends_with(s: &[Series]) -> PolarsResult { - let ca = &s[0].utf8()?; - let sub = &s[1].utf8()?; + let ca = s[0].utf8()?; + let sub = s[1].utf8()?; let mut out: BooleanChunked = match sub.len() { 1 => match sub.get(0) { @@ -248,7 +249,7 @@ pub(super) fn ends_with(s: &[Series]) -> PolarsResult { }, _ => ca .into_iter() - .zip(sub.into_iter()) + .zip(sub) .map(|(opt_src, opt_val)| match (opt_src, opt_val) { (Some(src), Some(val)) => src.ends_with(val), _ => false, @@ -261,8 +262,8 @@ pub(super) fn ends_with(s: &[Series]) -> PolarsResult { } pub(super) fn starts_with(s: &[Series]) -> PolarsResult { - let ca = &s[0].utf8()?; - let sub = &s[1].utf8()?; + let ca = s[0].utf8()?; + let sub = s[1].utf8()?; let mut out: BooleanChunked = match sub.len() { 1 => match sub.get(0) { @@ -271,7 +272,7 @@ pub(super) fn starts_with(s: &[Series]) -> PolarsResult { }, _ => ca .into_iter() - .zip(sub.into_iter()) + .zip(sub) .map(|(opt_src, opt_val)| match (opt_src, opt_val) { (Some(src), Some(val)) => src.starts_with(val), _ => false, @@ -537,7 +538,7 @@ where { let mut out: Utf8Chunked = ca .into_iter() - .zip(val.into_iter()) + .zip(val) .map(|(opt_src, opt_val)| match (opt_src, opt_val) { (Some(src), Some(val)) => Some(f(src, val)), _ => None, diff --git a/polars/polars-lazy/polars-plan/src/dsl/function_expr/temporal.rs b/polars/polars-lazy/polars-plan/src/dsl/function_expr/temporal.rs index 3ae2eb9c52d7..ae07aa4fd636 100644 --- a/polars/polars-lazy/polars-plan/src/dsl/function_expr/temporal.rs +++ b/polars/polars-lazy/polars-plan/src/dsl/function_expr/temporal.rs @@ -186,7 +186,7 @@ pub(super) fn temporal_range_dispatch( start.len() * 5, DataType::Int32, ); - for (start, stop) in start.into_iter().zip(stop.into_iter()) { + for (start, stop) in start.into_iter().zip(stop) { match (start, stop) { (Some(start), Some(stop)) => { let rng = date_range_impl( @@ -215,7 +215,7 @@ pub(super) fn temporal_range_dispatch( start.len() * 5, DataType::Int64, ); - for (start, stop) in start.into_iter().zip(stop.into_iter()) { + for (start, stop) in start.into_iter().zip(stop) { match (start, stop) { (Some(start), Some(stop)) => { let rng = date_range_impl("", start, stop, every, closed, tu, tz.as_ref())?; @@ -233,7 +233,7 @@ pub(super) fn temporal_range_dispatch( start.len() * 5, DataType::Int64, ); - for (start, stop) in start.into_iter().zip(stop.into_iter()) { + for (start, stop) in start.into_iter().zip(stop) { match (start, stop) { (Some(start), Some(stop)) => { let rng = date_range_impl( diff --git a/polars/polars-lazy/polars-plan/src/dsl/function_expr/trigonometry.rs b/polars/polars-lazy/polars-plan/src/dsl/function_expr/trigonometry.rs index 6cf425b35b8d..866ccb713ec3 100644 --- a/polars/polars-lazy/polars-plan/src/dsl/function_expr/trigonometry.rs +++ b/polars/polars-lazy/polars-plan/src/dsl/function_expr/trigonometry.rs @@ -130,7 +130,7 @@ where } else { Ok(Some( y.into_iter() - .zip(x.into_iter()) + .zip(x) .map(|(opt_y, opt_x)| match (opt_y, opt_x) { (Some(y), Some(x)) => Some(y.atan2(x)), _ => None, diff --git a/polars/polars-lazy/polars-plan/src/dsl/functions/temporal.rs b/polars/polars-lazy/polars-plan/src/dsl/functions/temporal.rs index 30b51c117502..2b035d265cc7 100644 --- 
a/polars/polars-lazy/polars-plan/src/dsl/functions/temporal.rs +++ b/polars/polars-lazy/polars-plan/src/dsl/functions/temporal.rs @@ -138,12 +138,12 @@ pub fn datetime(args: DatetimeArgs) -> Expr { let ca: Int64Chunked = year .into_iter() - .zip(month.into_iter()) - .zip(day.into_iter()) - .zip(hour.into_iter()) - .zip(minute.into_iter()) - .zip(second.into_iter()) - .zip(microsecond.into_iter()) + .zip(month) + .zip(day) + .zip(hour) + .zip(minute) + .zip(second) + .zip(microsecond) .map(|((((((y, m), d), h), mnt), s), us)| { if let (Some(y), Some(m), Some(d), Some(h), Some(mnt), Some(s), Some(us)) = (y, m, d, h, mnt, s, us) diff --git a/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/cse_expr.rs b/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/cse_expr.rs index f725cf7d9d31..0fb9d86c09c5 100644 --- a/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/cse_expr.rs +++ b/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/cse_expr.rs @@ -617,7 +617,9 @@ mod test { }) .unwrap(); - let ALogicalPlan::Projection {expr, ..} = out.to_alp() else { unreachable!() }; + let ALogicalPlan::Projection { expr, .. } = out.to_alp() else { + unreachable!() + }; let default = expr.default_exprs(); assert_eq!(default.len(), 3); diff --git a/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/fast_projection.rs b/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/fast_projection.rs index e796ff0a4032..b88384951188 100644 --- a/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/fast_projection.rs +++ b/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/fast_projection.rs @@ -32,7 +32,7 @@ impl FastProjectionAndCollapse { fn impl_fast_projection( input: Node, expr: &[Node], - expr_arena: &mut Arena, + expr_arena: &Arena, ) -> Option { let mut columns = Vec::with_capacity(expr.len()); for node in expr.iter() { diff --git a/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/fused.rs b/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/fused.rs index 9009e6de903f..76117d96bbd2 100644 --- a/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/fused.rs +++ b/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/fused.rs @@ -26,7 +26,9 @@ fn check_eligible( expr_arena: &Arena, lp_arena: &Arena, ) -> PolarsResult<(Option, Option)> { - let Some(input_node) = lp_arena.get(lp_node).get_input() else {return Ok((None, None))}; + let Some(input_node) = lp_arena.get(lp_node).get_input() else { + return Ok((None, None)); + }; let schema = lp_arena.get(input_node).schema(lp_arena); let field_left = expr_arena .get(*left) diff --git a/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/predicate_pushdown/utils.rs b/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/predicate_pushdown/utils.rs index 6fd09f22f54b..3526e5b303a9 100644 --- a/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/predicate_pushdown/utils.rs +++ b/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/predicate_pushdown/utils.rs @@ -265,7 +265,7 @@ fn rename_predicate_columns_due_to_aliased_projection( /// Implementation for both Hstack and Projection pub(super) fn rewrite_projection_node( expr_arena: &mut Arena, - lp_arena: &mut Arena, + lp_arena: &Arena, acc_predicates: &mut PlHashMap, Node>, projections: Vec, input: Node, diff --git a/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/projection_pushdown/joins.rs b/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/projection_pushdown/joins.rs index d9749ea1fca5..9bd5290edeb9 100644 
--- a/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/projection_pushdown/joins.rs +++ b/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/projection_pushdown/joins.rs @@ -51,7 +51,9 @@ pub(super) fn process_asof_join( let mut names_right = PlHashSet::with_capacity(n); let mut local_projection = Vec::with_capacity(n); - let JoinType::AsOf(asof_options) = &options.args.how else {unreachable!()}; + let JoinType::AsOf(asof_options) = &options.args.how else { + unreachable!() + }; // if there are no projections we don't have to do anything (all columns are projected) // otherwise we build local projections to sort out proper column names due to the diff --git a/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/projection_pushdown/mod.rs b/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/projection_pushdown/mod.rs index 91c812b29309..d853e7be86b5 100644 --- a/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/projection_pushdown/mod.rs +++ b/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/projection_pushdown/mod.rs @@ -73,7 +73,7 @@ fn get_scan_columns( fn split_acc_projections( acc_projections: Vec, down_schema: &Schema, - expr_arena: &mut Arena, + expr_arena: &Arena, expands_schema: bool, ) -> (Vec, Vec, PlHashSet>) { // If node above has as many columns as the projection there is nothing to pushdown. @@ -99,7 +99,7 @@ fn add_expr_to_accumulated( expr: Node, acc_projections: &mut Vec, projected_names: &mut PlHashSet>, - expr_arena: &mut Arena, + expr_arena: &Arena, ) { for root_node in aexpr_to_column_nodes_iter(expr, expr_arena) { for name in aexpr_to_leaf_names_iter(root_node, expr_arena) { @@ -218,7 +218,7 @@ impl ProjectionPushDown { pushdown_right: &mut Vec, names_left: &mut PlHashSet>, names_right: &mut PlHashSet>, - expr_arena: &mut Arena, + expr_arena: &Arena, ) -> (bool, bool) { let mut pushed_at_least_one = false; let mut already_projected = false; diff --git a/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/simplify_expr.rs b/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/simplify_expr.rs index c11fcab39fd0..d668e35fc169 100644 --- a/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/simplify_expr.rs +++ b/polars/polars-lazy/polars-plan/src/logical_plan/optimizer/simplify_expr.rs @@ -678,7 +678,9 @@ fn inline_cast(input: &AExpr, dtype: &DataType, strict: bool) -> PolarsResult { - let Some(av) = lv.to_anyvalue() else {return Ok(None)}; + let Some(av) = lv.to_anyvalue() else { + return Ok(None); + }; match (av, dtype) { // casting null always remains null (AnyValue::Null, _) => return Ok(None), diff --git a/polars/polars-lazy/polars-plan/src/logical_plan/projection.rs b/polars/polars-lazy/polars-plan/src/logical_plan/projection.rs index e5e7cb164b84..4620d1ebbe13 100644 --- a/polars/polars-lazy/polars-plan/src/logical_plan/projection.rs +++ b/polars/polars-lazy/polars-plan/src/logical_plan/projection.rs @@ -588,7 +588,9 @@ fn replace_selector(expr: &mut Expr, schema: &Schema, keys: &[Expr]) -> PolarsRe members .into_iter() .map(|e| { - let Expr::Column(name) = e else {unreachable!()}; + let Expr::Column(name) = e else { + unreachable!() + }; name.to_string() }) .collect(), diff --git a/polars/polars-lazy/polars-plan/src/utils.rs b/polars/polars-lazy/polars-plan/src/utils.rs index 0deedbf59e27..c3f79f49f6f6 100644 --- a/polars/polars-lazy/polars-plan/src/utils.rs +++ b/polars/polars-lazy/polars-plan/src/utils.rs @@ -397,7 +397,7 @@ where pub fn expr_is_projected_upstream( e: &Node, input: Node, - lp_arena: 
&mut Arena, + lp_arena: &Arena, expr_arena: &Arena, projected_names: &PlHashSet>, ) -> bool { diff --git a/polars/polars-lazy/src/dsl/functions.rs b/polars/polars-lazy/src/dsl/functions.rs index 0e8a778db0c6..44710a2bb464 100644 --- a/polars/polars-lazy/src/dsl/functions.rs +++ b/polars/polars-lazy/src/dsl/functions.rs @@ -68,7 +68,13 @@ pub(crate) fn concat_impl>( }; if convert_supertypes { - let LogicalPlan::Union {mut inputs, options} = lf.logical_plan else { unreachable!()} ; + let LogicalPlan::Union { + mut inputs, + options, + } = lf.logical_plan + else { + unreachable!() + }; let mut schema = inputs[0].schema()?.as_ref().as_ref().clone(); let mut changed = false; @@ -141,7 +147,7 @@ pub fn diag_concat_lf>( let lfs_with_all_columns = lfs .into_iter() // Zip Frames with their Schemas - .zip(schemas.into_iter()) + .zip(schemas) .map(|(mut lf, lf_schema)| { for (name, dtype) in total_schema.iter() { // If a name from Total Schema is not present - append diff --git a/polars/polars-lazy/src/frame/mod.rs b/polars/polars-lazy/src/frame/mod.rs index b796aeb77cf6..ba3328df6d8a 100644 --- a/polars/polars-lazy/src/frame/mod.rs +++ b/polars/polars-lazy/src/frame/mod.rs @@ -374,7 +374,7 @@ impl LazyFrame { let mut existing_vec: Vec = Vec::with_capacity(cap); let mut new_vec: Vec = Vec::with_capacity(cap); - for (existing, new) in iter.zip(new.into_iter()) { + for (existing, new) in iter.zip(new) { let existing = existing.as_ref(); let new = new.as_ref(); diff --git a/polars/polars-lazy/src/physical_plan/executors/groupby.rs b/polars/polars-lazy/src/physical_plan/executors/groupby.rs index 9b0390070f5f..b2cb72d32e36 100644 --- a/polars/polars-lazy/src/physical_plan/executors/groupby.rs +++ b/polars/polars-lazy/src/physical_plan/executors/groupby.rs @@ -6,7 +6,7 @@ pub(super) fn evaluate_aggs( df: &DataFrame, aggs: &[Arc], groups: &GroupsProxy, - state: &mut ExecutionState, + state: &ExecutionState, ) -> PolarsResult> { POOL.install(|| { aggs.par_iter() @@ -61,7 +61,7 @@ pub(super) fn groupby_helper( keys: Vec, aggs: &[Arc], apply: Option>, - state: &mut ExecutionState, + state: &ExecutionState, maintain_order: bool, slice: Option<(i64, usize)>, ) -> PolarsResult { @@ -97,11 +97,7 @@ pub(super) fn groupby_helper( } impl GroupByExec { - fn execute_impl( - &mut self, - state: &mut ExecutionState, - df: DataFrame, - ) -> PolarsResult { + fn execute_impl(&mut self, state: &ExecutionState, df: DataFrame) -> PolarsResult { let keys = self .keys .iter() diff --git a/polars/polars-lazy/src/physical_plan/executors/groupby_dynamic.rs b/polars/polars-lazy/src/physical_plan/executors/groupby_dynamic.rs index fc092b296451..651ee63716e1 100644 --- a/polars/polars-lazy/src/physical_plan/executors/groupby_dynamic.rs +++ b/polars/polars-lazy/src/physical_plan/executors/groupby_dynamic.rs @@ -23,7 +23,7 @@ impl GroupByDynamicExec { #[cfg(feature = "dynamic_groupby")] fn execute_impl( &mut self, - state: &mut ExecutionState, + state: &ExecutionState, mut df: DataFrame, ) -> PolarsResult { df.as_single_chunk_par(); diff --git a/polars/polars-lazy/src/physical_plan/executors/groupby_partitioned.rs b/polars/polars-lazy/src/physical_plan/executors/groupby_partitioned.rs index 9184294c5f28..b34b8464b55f 100644 --- a/polars/polars-lazy/src/physical_plan/executors/groupby_partitioned.rs +++ b/polars/polars-lazy/src/physical_plan/executors/groupby_partitioned.rs @@ -270,6 +270,7 @@ impl PartitionGroupByExec { } } + #[allow(clippy::needless_pass_by_ref_mut)] fn execute_impl( &mut self, state: &mut ExecutionState, diff --git 
a/polars/polars-lazy/src/physical_plan/executors/groupby_rolling.rs b/polars/polars-lazy/src/physical_plan/executors/groupby_rolling.rs index a39b507c8eba..88f2f1dc39f0 100644 --- a/polars/polars-lazy/src/physical_plan/executors/groupby_rolling.rs +++ b/polars/polars-lazy/src/physical_plan/executors/groupby_rolling.rs @@ -21,7 +21,7 @@ impl GroupByRollingExec { #[cfg(feature = "dynamic_groupby")] fn execute_impl( &mut self, - state: &mut ExecutionState, + state: &ExecutionState, mut df: DataFrame, ) -> PolarsResult { df.as_single_chunk_par(); diff --git a/polars/polars-lazy/src/physical_plan/executors/mod.rs b/polars/polars-lazy/src/physical_plan/executors/mod.rs index d8af6384f66a..a9813ffc6ab9 100644 --- a/polars/polars-lazy/src/physical_plan/executors/mod.rs +++ b/polars/polars-lazy/src/physical_plan/executors/mod.rs @@ -154,7 +154,7 @@ pub(super) fn evaluate_physical_expressions( df: &mut DataFrame, cse_exprs: &[Arc], exprs: &[Arc], - state: &mut ExecutionState, + state: &ExecutionState, has_windows: bool, ) -> PolarsResult> { let selected_columns = if !cse_exprs.is_empty() { diff --git a/polars/polars-lazy/src/physical_plan/executors/projection.rs b/polars/polars-lazy/src/physical_plan/executors/projection.rs index e7ffae6febaf..b4e5a11ded9c 100644 --- a/polars/polars-lazy/src/physical_plan/executors/projection.rs +++ b/polars/polars-lazy/src/physical_plan/executors/projection.rs @@ -15,7 +15,7 @@ pub struct ProjectionExec { impl ProjectionExec { fn execute_impl( &mut self, - state: &mut ExecutionState, + state: &ExecutionState, mut df: DataFrame, ) -> PolarsResult { #[allow(clippy::let_and_return)] diff --git a/polars/polars-lazy/src/physical_plan/executors/sort.rs b/polars/polars-lazy/src/physical_plan/executors/sort.rs index 2d34175d7212..b3c64d0ab61b 100644 --- a/polars/polars-lazy/src/physical_plan/executors/sort.rs +++ b/polars/polars-lazy/src/physical_plan/executors/sort.rs @@ -9,7 +9,7 @@ pub(crate) struct SortExec { impl SortExec { fn execute_impl( &mut self, - state: &mut ExecutionState, + state: &ExecutionState, mut df: DataFrame, ) -> PolarsResult { df.as_single_chunk_par(); diff --git a/polars/polars-lazy/src/physical_plan/executors/stack.rs b/polars/polars-lazy/src/physical_plan/executors/stack.rs index 732ae167d179..18f4572cc526 100644 --- a/polars/polars-lazy/src/physical_plan/executors/stack.rs +++ b/polars/polars-lazy/src/physical_plan/executors/stack.rs @@ -11,7 +11,7 @@ pub struct StackExec { impl StackExec { fn execute_impl( &mut self, - state: &mut ExecutionState, + state: &ExecutionState, mut df: DataFrame, ) -> PolarsResult { let res = evaluate_physical_expressions( diff --git a/polars/polars-lazy/src/physical_plan/expressions/take.rs b/polars/polars-lazy/src/physical_plan/expressions/take.rs index be67d9b77c08..f354965f9ee5 100644 --- a/polars/polars-lazy/src/physical_plan/expressions/take.rs +++ b/polars/polars-lazy/src/physical_plan/expressions/take.rs @@ -58,121 +58,113 @@ impl PhysicalExpr for TakeExpr { let mut ac = self.phys_expr.evaluate_on_groups(df, groups, state)?; let mut idx = self.idx.evaluate_on_groups(df, groups, state)?; - let idx = - match idx.state { - AggState::AggregatedFlat(s) => { - let idx = s.cast(&IDX_DTYPE)?; - let idx = idx.idx().unwrap(); - - // The indexes are AggregatedFlat, meaning they are a single values pointing into - // a group. - // If we zip this with the first of each group -> `idx + firs` then we can - // simply use a take operation on the whole array instead of per group. 
- - // The groups maybe scattered all over the place, so we sort by group - ac.sort_by_groups(); - - // A previous aggregation may have updated the groups - let groups = ac.groups(); - - // Determine the take indices - let idx: IdxCa = - match groups.as_ref() { - GroupsProxy::Idx(groups) => { - if groups.all().iter().zip(idx.into_iter()).any( - |(g, idx)| match idx { - None => true, - Some(idx) => idx >= g.len() as IdxSize, - }, - ) { - self.oob_err()?; - } + let idx = match idx.state { + AggState::AggregatedFlat(s) => { + let idx = s.cast(&IDX_DTYPE)?; + let idx = idx.idx().unwrap(); + + // The indexes are AggregatedFlat, meaning they are a single values pointing into + // a group. + // If we zip this with the first of each group -> `idx + firs` then we can + // simply use a take operation on the whole array instead of per group. + + // The groups maybe scattered all over the place, so we sort by group + ac.sort_by_groups(); + + // A previous aggregation may have updated the groups + let groups = ac.groups(); + + // Determine the take indices + let idx: IdxCa = match groups.as_ref() { + GroupsProxy::Idx(groups) => { + if groups.all().iter().zip(idx).any(|(g, idx)| match idx { + None => true, + Some(idx) => idx >= g.len() as IdxSize, + }) { + self.oob_err()?; + } - idx.into_iter() - .zip(groups.first().iter()) - .map(|(idx, first)| idx.map(|idx| idx + first)) - .collect_trusted() - } - GroupsProxy::Slice { groups, .. } => { - if groups - .iter() - .zip(idx.into_iter()) - .any(|(g, idx)| match idx { - None => true, - Some(idx) => idx >= g[1], - }) - { - self.oob_err()?; - } + idx.into_iter() + .zip(groups.first().iter()) + .map(|(idx, first)| idx.map(|idx| idx + first)) + .collect_trusted() + } + GroupsProxy::Slice { groups, .. } => { + if groups.iter().zip(idx).any(|(g, idx)| match idx { + None => true, + Some(idx) => idx >= g[1], + }) { + self.oob_err()?; + } - idx.into_iter() - .zip(groups.iter()) - .map(|(idx, g)| idx.map(|idx| idx + g[0])) - .collect_trusted() + idx.into_iter() + .zip(groups.iter()) + .map(|(idx, g)| idx.map(|idx| idx + g[0])) + .collect_trusted() + } + }; + let taken = ac.flat_naive().take(&idx)?; + ac.with_series(taken, true, Some(&self.expr))?; + return Ok(ac); + } + AggState::AggregatedList(s) => s.list().unwrap().clone(), + // Maybe a literal as well, this needs a different path + AggState::NotAggregated(_) => { + let s = idx.aggregated(); + s.list().unwrap().clone() + } + AggState::Literal(s) => { + let idx = s.cast(&IDX_DTYPE)?; + let idx = idx.idx().unwrap(); + + return if idx.len() == 1 { + match idx.get(0) { + None => polars_bail!(ComputeError: "cannot take by a null"), + Some(idx) => { + if idx != 0 { + // We must make sure that the column we take from is sorted by + // groups otherwise we might point into the wrong group + ac.sort_by_groups() } - }; - let taken = ac.flat_naive().take(&idx)?; - ac.with_series(taken, true, Some(&self.expr))?; - return Ok(ac); - } - AggState::AggregatedList(s) => s.list().unwrap().clone(), - // Maybe a literal as well, this needs a different path - AggState::NotAggregated(_) => { - let s = idx.aggregated(); - s.list().unwrap().clone() - } - AggState::Literal(s) => { - let idx = s.cast(&IDX_DTYPE)?; - let idx = idx.idx().unwrap(); - - return if idx.len() == 1 { - match idx.get(0) { - None => polars_bail!(ComputeError: "cannot take by a null"), - Some(idx) => { - if idx != 0 { - // We must make sure that the column we take from is sorted by - // groups otherwise we might point into the wrong group - ac.sort_by_groups() - } - // 
Make sure that we look at the updated groups. - let groups = ac.groups(); - - // we offset the groups first by idx; - let idx: NoNull = match groups.as_ref() { - GroupsProxy::Idx(groups) => { - if groups.all().iter().any(|g| idx >= g.len() as IdxSize) { - self.oob_err()?; - } - - groups.first().iter().map(|f| *f + idx).collect_trusted() + // Make sure that we look at the updated groups. + let groups = ac.groups(); + + // we offset the groups first by idx; + let idx: NoNull = match groups.as_ref() { + GroupsProxy::Idx(groups) => { + if groups.all().iter().any(|g| idx >= g.len() as IdxSize) { + self.oob_err()?; } - GroupsProxy::Slice { groups, .. } => { - if groups.iter().any(|g| idx >= g[1]) { - self.oob_err()?; - } - groups.iter().map(|g| g[0] + idx).collect_trusted() + groups.first().iter().map(|f| *f + idx).collect_trusted() + } + GroupsProxy::Slice { groups, .. } => { + if groups.iter().any(|g| idx >= g[1]) { + self.oob_err()?; } - }; - let taken = ac.flat_naive().take(&idx.into_inner())?; - ac.with_series(taken, true, Some(&self.expr))?; - ac.with_update_groups(UpdateGroups::WithGroupsLen); - Ok(ac) - } + + groups.iter().map(|g| g[0] + idx).collect_trusted() + } + }; + let taken = ac.flat_naive().take(&idx.into_inner())?; + ac.with_series(taken, true, Some(&self.expr))?; + ac.with_update_groups(UpdateGroups::WithGroupsLen); + Ok(ac) } - } else { - let out = ac - .aggregated() - .list() - .unwrap() - .try_apply_amortized(|s| s.as_ref().take(idx))?; - - ac.with_series(out.into_series(), true, Some(&self.expr))?; - ac.with_update_groups(UpdateGroups::WithGroupsLen); - Ok(ac) - }; - } - }; + } + } else { + let out = ac + .aggregated() + .list() + .unwrap() + .try_apply_amortized(|s| s.as_ref().take(idx))?; + + ac.with_series(out.into_series(), true, Some(&self.expr))?; + ac.with_update_groups(UpdateGroups::WithGroupsLen); + Ok(ac) + }; + } + }; let s = idx.cast(&DataType::List(Box::new(IDX_DTYPE)))?; let idx = s.list().unwrap(); diff --git a/polars/polars-lazy/src/physical_plan/expressions/ternary.rs b/polars/polars-lazy/src/physical_plan/expressions/ternary.rs index 1281c0e9a427..541b0deccb91 100644 --- a/polars/polars-lazy/src/physical_plan/expressions/ternary.rs +++ b/polars/polars-lazy/src/physical_plan/expressions/ternary.rs @@ -199,7 +199,7 @@ impl PhysicalExpr for TernaryExpr { check_length(ca, mask)?; let mut out: ListChunked = ca .into_iter() - .zip(mask.into_iter()) + .zip(mask) .map(|(truthy, take)| match (truthy, take) { (Some(v), Some(true)) => Some(v), (Some(_), Some(false)) => None, @@ -217,7 +217,7 @@ impl PhysicalExpr for TernaryExpr { check_length(ca, mask)?; let mut out: ListChunked = ca .into_iter() - .zip(mask.into_iter()) + .zip(mask) .map(|(falsy, take)| match (falsy, take) { (Some(_), Some(true)) => None, (Some(v), Some(false)) => Some(v), @@ -239,7 +239,7 @@ impl PhysicalExpr for TernaryExpr { check_length(ca, mask)?; let mut out: ListChunked = ca .into_iter() - .zip(mask.into_iter()) + .zip(mask) .map(|(falsy, take)| match (falsy, take) { (Some(_), Some(true)) => Some(literal.clone()), (Some(v), Some(false)) => Some(v), @@ -256,7 +256,7 @@ impl PhysicalExpr for TernaryExpr { check_length(ca, mask)?; let mut out: ListChunked = ca .into_iter() - .zip(mask.into_iter()) + .zip(mask) .map(|(truthy, take)| match (truthy, take) { (Some(v), Some(true)) => Some(v), (Some(_), Some(false)) => Some(literal.clone()), diff --git a/polars/polars-lazy/src/physical_plan/expressions/window.rs b/polars/polars-lazy/src/physical_plan/expressions/window.rs index 
6e6f345db2d6..547fe27dffbe 100644 --- a/polars/polars-lazy/src/physical_plan/expressions/window.rs +++ b/polars/polars-lazy/src/physical_plan/expressions/window.rs @@ -704,7 +704,7 @@ where } } let mut values = Vec::with_capacity(len); - let ptr = values.as_mut_ptr() as *mut T::Native; + let ptr: *mut T::Native = values.as_mut_ptr(); // safety: // we will write from different threads but we will never alias. let sync_ptr_values = unsafe { SyncPtr::new(ptr) }; diff --git a/polars/polars-lazy/src/physical_plan/streaming/construct_pipeline.rs b/polars/polars-lazy/src/physical_plan/streaming/construct_pipeline.rs index 6201aef64598..1c3f8d1d0af9 100644 --- a/polars/polars-lazy/src/physical_plan/streaming/construct_pipeline.rs +++ b/polars/polars-lazy/src/physical_plan/streaming/construct_pipeline.rs @@ -61,7 +61,9 @@ fn jit_insert_slice( use ALogicalPlan::*; let (offset, len) = match lp_arena.get(node) { Join { options, .. } if options.args.slice.is_some() => { - let Some((offset, len)) = options.args.slice else { unreachable!()}; + let Some((offset, len)) = options.args.slice else { + unreachable!() + }; (offset, len) } Union { @@ -185,7 +187,9 @@ pub(super) fn construct( // also pipelines are not ready to receive inputs otherwise pipelines.sort_by(|a, b| a.0.cmp(&b.0)); - let Some(final_sink) = final_sink else { return Ok(None) }; + let Some(final_sink) = final_sink else { + return Ok(None); + }; let insertion_location = match lp_arena.get(final_sink) { FileSink { input, @@ -211,7 +215,9 @@ pub(super) fn construct( None }; - let Some((_, mut most_left)) = pipelines.pop() else {unreachable!()}; + let Some((_, mut most_left)) = pipelines.pop() else { + unreachable!() + }; while let Some((_, rhs)) = pipelines.pop() { most_left = most_left.with_other_branch(rhs) } diff --git a/polars/polars-lazy/src/physical_plan/streaming/convert_alp.rs b/polars/polars-lazy/src/physical_plan/streaming/convert_alp.rs index b8fd4bc14446..8e6ce83f1b69 100644 --- a/polars/polars-lazy/src/physical_plan/streaming/convert_alp.rs +++ b/polars/polars-lazy/src/physical_plan/streaming/convert_alp.rs @@ -287,7 +287,9 @@ pub(crate) fn insert_streaming_nodes( } if *offset >= 0 => { insert_slice(root, *offset, *len as IdxSize, lp_arena, &mut state); state.streamable = true; - let Union {inputs, ..} = lp_arena.get(root) else {unreachable!()}; + let Union { inputs, .. } = lp_arena.get(root) else { + unreachable!() + }; for (i, input) in inputs.iter().enumerate() { let mut state = if i == 0 { // note the clone! 
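A recurring cleanup throughout this diff is dropping the explicit `.into_iter()` inside `.zip(...)` calls (diag_concat_lf, the ternary and take expressions, the list and string namespaces, floor_div_array, and the set_at_idx match arms below). `Iterator::zip` already accepts any `IntoIterator`, so the extra conversion is a no-op that newer clippy reports as `clippy::useless_conversion` (the same lint silenced in the pivot positioning hunk). The following is a minimal, self-contained sketch of the pattern using illustrative vectors, not the Polars types:

fn main() {
    let xs = vec![1, 2, 3];
    let ys = vec![10, 20, 30];

    // Before: xs.iter().zip(ys.into_iter()) -- works, but the `.into_iter()` is redundant,
    // because `zip` takes `impl IntoIterator` and performs the conversion itself.
    // After: pass the collection (or a reference to it) directly.
    let sums: Vec<i32> = xs.iter().zip(&ys).map(|(a, b)| a + b).collect();
    assert_eq!(sums, vec![11, 22, 33]);
}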
diff --git a/polars/polars-lazy/src/tests/predicate_queries.rs b/polars/polars-lazy/src/tests/predicate_queries.rs index 80363268acb1..0a854bf420a6 100644 --- a/polars/polars-lazy/src/tests/predicate_queries.rs +++ b/polars/polars-lazy/src/tests/predicate_queries.rs @@ -44,7 +44,7 @@ fn test_issue_2472() -> PolarsResult<()> { let extract = col("group") .cast(DataType::Utf8) .str() - .extract(r#"(\d+-){4}(\w+)-"#, 2) + .extract(r"(\d+-){4}(\w+)-", 2) .cast(DataType::Int32) .alias("age"); let predicate = col("age").is_in(lit(Series::new("", [2i32]))); diff --git a/polars/polars-ops/src/chunked_array/list/namespace.rs b/polars/polars-ops/src/chunked_array/list/namespace.rs index 9cbb733c4ca8..da61fa9ff5cb 100644 --- a/polars/polars-ops/src/chunked_array/list/namespace.rs +++ b/polars/polars-ops/src/chunked_array/list/namespace.rs @@ -290,7 +290,7 @@ pub trait ListNameSpaceImpl: AsList { let idx_ca = idx.list().unwrap(); let mut out = list_ca .amortized_iter() - .zip(idx_ca.into_iter()) + .zip(idx_ca) .map(|(opt_s, opt_idx)| { { match (opt_s, opt_idx) { diff --git a/polars/polars-ops/src/chunked_array/strings/namespace.rs b/polars/polars-ops/src/chunked_array/strings/namespace.rs index afe1e7134c05..1e056fbcc86b 100644 --- a/polars/polars-ops/src/chunked_array/strings/namespace.rs +++ b/polars/polars-ops/src/chunked_array/strings/namespace.rs @@ -336,7 +336,7 @@ pub trait Utf8NameSpaceImpl: AsUtf8 { let mut builder = ListUtf8ChunkedBuilder::new(ca.name(), ca.len(), ca.get_values_size()); - for (opt_s, opt_pat) in ca.into_iter().zip(pat.into_iter()) { + for (opt_s, opt_pat) in ca.into_iter().zip(pat) { match (opt_s, opt_pat) { (_, None) | (None, _) => builder.append_null(), (Some(s), Some(pat)) => { diff --git a/polars/polars-ops/src/chunked_array/top_k.rs b/polars/polars-ops/src/chunked_array/top_k.rs index f50e8ad62dcf..c1b5ac930032 100644 --- a/polars/polars-ops/src/chunked_array/top_k.rs +++ b/polars/polars-ops/src/chunked_array/top_k.rs @@ -17,7 +17,7 @@ impl PartialEq for Compare { impl PartialOrd for Compare { fn partial_cmp(&self, other: &Self) -> Option { - Some(compare_fn_nan_max(&self.0, &other.0)) + Some(self.cmp(other)) } } @@ -25,9 +25,7 @@ impl Eq for Compare {} impl Ord for Compare { fn cmp(&self, other: &Self) -> Ordering { - // Safety: - // we always return Some - unsafe { self.partial_cmp(other).unwrap_unchecked() } + compare_fn_nan_max(&self.0, &other.0) } } diff --git a/polars/polars-ops/src/frame/pivot/positioning.rs b/polars/polars-ops/src/frame/pivot/positioning.rs index 65754c9ec038..c64f861d6554 100644 --- a/polars/polars-ops/src/frame/pivot/positioning.rs +++ b/polars/polars-ops/src/frame/pivot/positioning.rs @@ -131,6 +131,8 @@ where let col_locations = &col_locations[offset..offset + len]; let value_agg_phys = value_agg_phys.slice(offset as i64, len); + // todo! 
remove lint silencing + #[allow(clippy::useless_conversion)] for ((row_idx, col_idx), val) in row_locations .iter() .zip(col_locations) diff --git a/polars/polars-ops/src/series/ops/floor_divide.rs b/polars/polars-ops/src/series/ops/floor_divide.rs index e673125295aa..f536caeb2d49 100644 --- a/polars/polars-ops/src/series/ops/floor_divide.rs +++ b/polars/polars-ops/src/series/ops/floor_divide.rs @@ -45,7 +45,7 @@ fn floor_div_array( } else { let iter = a .into_iter() - .zip(b.into_iter()) + .zip(b) .map(|(opt_a, opt_b)| match (opt_a, opt_b) { (Some(&a), Some(&b)) => Some(floor_div_element(a, b)), _ => None, diff --git a/polars/polars-time/src/windows/duration.rs b/polars/polars-time/src/windows/duration.rs index 455be0af9f28..90cfc2eee5f2 100644 --- a/polars/polars-time/src/windows/duration.rs +++ b/polars/polars-time/src/windows/duration.rs @@ -45,7 +45,7 @@ pub struct Duration { impl PartialOrd for Duration { fn partial_cmp(&self, other: &Self) -> Option { - self.duration_ns().partial_cmp(&other.duration_ns()) + Some(self.cmp(other)) } } diff --git a/py-polars/src/apply/lazy.rs b/py-polars/src/apply/lazy.rs index c73b0bbc9fd3..63778acb54fe 100644 --- a/py-polars/src/apply/lazy.rs +++ b/py-polars/src/apply/lazy.rs @@ -137,7 +137,7 @@ pub fn map_single( pub(crate) fn call_lambda_with_series_slice( py: Python, - s: &mut [Series], + s: &[Series], lambda: &PyObject, polars_module: &PyObject, ) -> PyObject { diff --git a/py-polars/src/series/set_at_idx.rs b/py-polars/src/series/set_at_idx.rs index f47b1fa8283b..c6f65e1b3bdf 100644 --- a/py-polars/src/series/set_at_idx.rs +++ b/py-polars/src/series/set_at_idx.rs @@ -47,52 +47,52 @@ fn set_at_idx(mut s: Series, idx: &Series, values: &Series) -> PolarsResult { let ca: &mut ChunkedArray = mutable_s.as_mut(); let values = values.i8()?; - std::mem::take(ca).set_at_idx2(idx, values.into_iter()) + std::mem::take(ca).set_at_idx2(idx, values) } DataType::Int16 => { let ca: &mut ChunkedArray = mutable_s.as_mut(); let values = values.i16()?; - std::mem::take(ca).set_at_idx2(idx, values.into_iter()) + std::mem::take(ca).set_at_idx2(idx, values) } DataType::Int32 => { let ca: &mut ChunkedArray = mutable_s.as_mut(); let values = values.i32()?; - std::mem::take(ca).set_at_idx2(idx, values.into_iter()) + std::mem::take(ca).set_at_idx2(idx, values) } DataType::Int64 => { let ca: &mut ChunkedArray = mutable_s.as_mut(); let values = values.i64()?; - std::mem::take(ca).set_at_idx2(idx, values.into_iter()) + std::mem::take(ca).set_at_idx2(idx, values) } DataType::UInt8 => { let ca: &mut ChunkedArray = mutable_s.as_mut(); let values = values.u8()?; - std::mem::take(ca).set_at_idx2(idx, values.into_iter()) + std::mem::take(ca).set_at_idx2(idx, values) } DataType::UInt16 => { let ca: &mut ChunkedArray = mutable_s.as_mut(); let values = values.u16()?; - std::mem::take(ca).set_at_idx2(idx, values.into_iter()) + std::mem::take(ca).set_at_idx2(idx, values) } DataType::UInt32 => { let ca: &mut ChunkedArray = mutable_s.as_mut(); let values = values.u32()?; - std::mem::take(ca).set_at_idx2(idx, values.into_iter()) + std::mem::take(ca).set_at_idx2(idx, values) } DataType::UInt64 => { let ca: &mut ChunkedArray = mutable_s.as_mut(); let values = values.u64()?; - std::mem::take(ca).set_at_idx2(idx, values.into_iter()) + std::mem::take(ca).set_at_idx2(idx, values) } DataType::Float32 => { let ca: &mut ChunkedArray = mutable_s.as_mut(); let values = values.f32()?; - std::mem::take(ca).set_at_idx2(idx, values.into_iter()) + std::mem::take(ca).set_at_idx2(idx, values) } 
DataType::Float64 => { let ca: &mut ChunkedArray = mutable_s.as_mut(); let values = values.f64()?; - std::mem::take(ca).set_at_idx2(idx, values.into_iter()) + std::mem::take(ca).set_at_idx2(idx, values) } DataType::Boolean => { let ca = s.bool()?; diff --git a/rust-toolchain.toml b/rust-toolchain.toml index bc8b526821c3..8eed5f0be0ad 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,2 +1,2 @@ [toolchain] -channel = "nightly-2023-06-23" +channel = "nightly-2023-07-27"
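The top_k.rs and duration.rs hunks above also flip the delegation between `PartialOrd` and `Ord`: the real comparison logic now lives in `Ord::cmp`, while `partial_cmp` simply returns `Some(self.cmp(other))`, the canonical form newer clippy expects for types implementing both traits. Below is a minimal sketch of that pattern, assuming a hypothetical `NanMaxF64` wrapper whose NaN-sorts-last ordering stands in for the crate's own comparison helper; it is not the Polars implementation.

use std::cmp::Ordering;

// Hypothetical wrapper type, for illustration only.
struct NanMaxF64(f64);

impl PartialEq for NanMaxF64 {
    fn eq(&self, other: &Self) -> bool {
        // Keep equality consistent with the total ordering defined below.
        self.cmp(other) == Ordering::Equal
    }
}

impl Eq for NanMaxF64 {}

impl Ord for NanMaxF64 {
    fn cmp(&self, other: &Self) -> Ordering {
        // Stand-in for a NaN-aware comparison: NaN compares as the largest value.
        match (self.0.is_nan(), other.0.is_nan()) {
            (true, true) => Ordering::Equal,
            (true, false) => Ordering::Greater,
            (false, true) => Ordering::Less,
            (false, false) => self.0.partial_cmp(&other.0).unwrap(),
        }
    }
}

impl PartialOrd for NanMaxF64 {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Delegate to `Ord`, never the other way around.
        Some(self.cmp(other))
    }
}

fn main() {
    let mut xs = vec![NanMaxF64(2.0), NanMaxF64(f64::NAN), NanMaxF64(1.0)];
    xs.sort();
    assert!(xs.last().unwrap().0.is_nan()); // NaN ends up last under this ordering
}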