chore: update rustc (pola-rs#12468)
ritchie46 authored Nov 15, 2023
1 parent 38b5322 commit 61cd8b8
Showing 32 changed files with 31 additions and 50 deletions.
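Most of this diff is mechanical: index-0 accesses written as .get(0) become .first() (the spelling that newer clippy's get_first lint prefers), and a number of pub use re-exports are dropped, presumably because the updated nightly reports them as unused. A minimal sketch of the slice change, on hypothetical data; the two calls are interchangeable and both return Option<&T>:

    fn main() {
        let columns = vec!["a", "b", "c"];
        assert_eq!(columns.get(0), Some(&"a"));
        assert_eq!(columns.first(), Some(&"a")); // same result, reads as intent
        let empty: Vec<&str> = Vec::new();
        assert_eq!(empty.first(), None); // still no panic on empty input
    }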
1 change: 0 additions & 1 deletion crates/polars-arrow/src/array/boolean/mod.rs
@@ -14,7 +14,6 @@ mod from;
mod iterator;
mod mutable;

pub use iterator::*;
pub use mutable::*;
use polars_error::{polars_bail, PolarsResult};

2 changes: 1 addition & 1 deletion crates/polars-arrow/src/array/fixed_size_list/mod.rs
@@ -7,7 +7,7 @@ mod data;
mod ffi;
pub(super) mod fmt;
mod iterator;
pub use iterator::*;

mod mutable;
pub use mutable::*;
use polars_error::{polars_bail, PolarsResult};
2 changes: 1 addition & 1 deletion crates/polars-arrow/src/array/growable/structure.rs
@@ -103,7 +103,7 @@ impl<'a> Growable<'a> for GrowableStruct<'a> {
// All children should have the same indexing, so just use the first
// one. If we don't have children, we might still have a validity
// array, so use that.
if let Some(child) = self.values.get(0) {
if let Some(child) = self.values.first() {
child.len()
} else {
self.validity.len()
2 changes: 1 addition & 1 deletion crates/polars-arrow/src/array/map/mod.rs
@@ -9,7 +9,7 @@ mod data;
mod ffi;
pub(super) mod fmt;
mod iterator;
pub use iterator::*;

use polars_error::{polars_bail, PolarsResult};

/// An array representing a (key, value), both of arbitrary logical types.
2 changes: 1 addition & 1 deletion crates/polars-arrow/src/array/primitive/mod.rs
@@ -14,7 +14,7 @@ mod ffi;
pub(super) mod fmt;
mod from_natural;
mod iterator;
pub use iterator::*;

mod mutable;
pub use mutable::*;
use polars_error::{polars_bail, PolarsResult};
6 changes: 1 addition & 5 deletions crates/polars-arrow/src/compute/aggregate/simd/mod.rs
@@ -100,10 +100,6 @@ simd_ord_int!(i128x8, i128);

#[cfg(not(feature = "simd"))]
mod native;
#[cfg(not(feature = "simd"))]
pub use native::*;

#[cfg(feature = "simd")]
mod packed;
#[cfg(feature = "simd")]
#[cfg_attr(docsrs, doc(cfg(feature = "simd")))]
pub use packed::*;
5 changes: 1 addition & 4 deletions crates/polars-arrow/src/compute/comparison/simd/mod.rs
@@ -125,9 +125,6 @@ macro_rules! simd8_native_all {

#[cfg(not(feature = "simd"))]
mod native;
#[cfg(not(feature = "simd"))]
pub use native::*;

#[cfg(feature = "simd")]
mod packed;
#[cfg(feature = "simd")]
pub use packed::*;
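The two SIMD helper modules above (aggregate and comparison) use the same feature-gated selection pattern; only the not(feature = "simd") glob re-export is removed, presumably because the updated nightly flags it as unused. A compressed sketch of what remains, reusing the module names from the diff:

    #[cfg(not(feature = "simd"))]
    mod native; // scalar fallback still compiles, just without a glob re-export
    #[cfg(feature = "simd")]
    mod packed;
    #[cfg(feature = "simd")]
    pub use packed::*; // the SIMD build keeps its public re-export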
2 changes: 1 addition & 1 deletion crates/polars-arrow/src/legacy/kernels/sort_partition.rs
@@ -76,7 +76,7 @@ pub fn partition_to_groups_amortized<T>(
) where
T: Debug + NativeType + PartialOrd,
{
if let Some(mut first) = values.get(0) {
if let Some(mut first) = values.first() {
out.clear();
if nulls_first && first_group_offset > 0 {
out.push([0, first_group_offset])
4 changes: 2 additions & 2 deletions crates/polars-core/src/chunked_array/logical/struct_/mod.rs
@@ -73,7 +73,7 @@ impl StructChunked {
}
pub fn new(name: &str, fields: &[Series]) -> PolarsResult<Self> {
let mut names = PlHashSet::with_capacity(fields.len());
let first_len = fields.get(0).map(|s| s.len()).unwrap_or(0);
let first_len = fields.first().map(|s| s.len()).unwrap_or(0);
let mut max_len = first_len;

let mut all_equal_len = true;
@@ -243,7 +243,7 @@ impl StructChunked {
}

pub fn len(&self) -> usize {
self.fields.get(0).map(|s| s.len()).unwrap_or(0)
self.fields.first().map(|s| s.len()).unwrap_or(0)
}
pub fn is_empty(&self) -> bool {
self.len() == 0
3 changes: 0 additions & 3 deletions crates/polars-core/src/chunked_array/ops/rolling_window.rs
@@ -250,6 +250,3 @@ mod inner_mod {
}
}
}

#[cfg(feature = "rolling_window")]
pub use inner_mod::*;
2 changes: 1 addition & 1 deletion crates/polars-core/src/datatypes/_serde.rs
@@ -3,7 +3,7 @@
//!
//! We could use [serde_1712](https://github.com/serde-rs/serde/issues/1712), but that gave problems caused by
//! [rust_96956](https://github.com/rust-lang/rust/issues/96956), so we make a dummy type without static
pub use arrow::datatypes::ArrowDataType;

use serde::{Deserialize, Deserializer, Serialize, Serializer};

use super::*;
4 changes: 2 additions & 2 deletions crates/polars-core/src/frame/mod.rs
@@ -626,7 +626,7 @@ impl DataFrame {

/// The number of chunks per column
pub fn n_chunks(&self) -> usize {
match self.columns.get(0) {
match self.columns.first() {
None => 0,
Some(s) => s.n_chunks(),
}
@@ -1234,7 +1234,7 @@ impl DataFrame {
/// }
/// ```
pub fn get(&self, idx: usize) -> Option<Vec<AnyValue>> {
match self.columns.get(0) {
match self.columns.first() {
Some(s) => {
if s.len() <= idx {
return None;
1 change: 0 additions & 1 deletion crates/polars-core/src/prelude.rs
@@ -49,7 +49,6 @@ pub use crate::schema::*;
pub use crate::series::arithmetic::checked::NumOpsDispatchChecked;
pub use crate::series::arithmetic::{LhsNumOps, NumOpsDispatch};
pub use crate::series::{IntoSeries, Series, SeriesTrait};
pub use crate::testing::*;
pub(crate) use crate::utils::CustomIterTools;
pub use crate::utils::IntoVec;
pub use crate::{datatypes, df};
1 change: 0 additions & 1 deletion crates/polars-core/src/series/series_trait.rs
@@ -9,7 +9,6 @@ use serde::{Deserialize, Serialize};

#[cfg(feature = "object")]
use crate::chunked_array::object::PolarsObjectSafe;
pub use crate::prelude::ChunkCompare;
use crate::prelude::*;

#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
@@ -222,7 +222,7 @@ pub(super) fn check_expand_literals(
mut selected_columns: Vec<Series>,
zero_length: bool,
) -> PolarsResult<DataFrame> {
let Some(first_len) = selected_columns.get(0).map(|s| s.len()) else {
let Some(first_len) = selected_columns.first().map(|s| s.len()) else {
return Ok(DataFrame::empty());
};
let mut df_height = 0;
2 changes: 1 addition & 1 deletion crates/polars-ops/src/chunked_array/strings/json_path.rs
@@ -9,7 +9,7 @@ pub fn extract_json<'a>(expr: &PathCompiled, json_str: &'a str) -> Option<Cow<'a
serde_json::from_str(json_str).ok().and_then(|value| {
// TODO: a lot of heap allocations here. Improve json path by adding a take?
let result = expr.select(&value).ok()?;
let first = *result.get(0)?;
let first = *result.first()?;

match first {
Value::String(s) => Some(Cow::Owned(s.clone())),
1 change: 0 additions & 1 deletion crates/polars-ops/src/frame/join/hash_join/mod.rs
@@ -9,7 +9,6 @@ mod single_keys_semi_anti;
pub(super) mod sort_merge;
mod zip_outer;

pub use args::*;
pub use multiple_keys::private_left_join_multiple_keys;
pub(super) use multiple_keys::*;
use polars_core::utils::{_set_partition_size, slice_slice, split_ca};
2 changes: 1 addition & 1 deletion crates/polars-ops/src/series/ops/approx_unique.rs
@@ -4,7 +4,7 @@ use polars_core::prelude::*;
use polars_core::with_match_physical_integer_polars_type;

#[cfg(feature = "approx_unique")]
use crate::series::HyperLogLog;
use crate::series::ops::approx_algo::HyperLogLog;

fn approx_n_unique_ca<'a, T>(ca: &'a ChunkedArray<T>) -> PolarsResult<Series>
where
2 changes: 2 additions & 0 deletions crates/polars-ops/src/series/ops/mod.rs
@@ -1,5 +1,6 @@
#[cfg(feature = "abs")]
mod abs;
#[cfg(feature = "approx_unique")]
mod approx_algo;
#[cfg(feature = "approx_unique")]
mod approx_unique;
@@ -52,6 +53,7 @@ mod various;

#[cfg(feature = "abs")]
pub use abs::*;
#[cfg(feature = "approx_unique")]
pub use approx_algo::*;
#[cfg(feature = "approx_unique")]
pub use approx_unique::*;
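The two polars-ops hunks above belong together: HyperLogLog is now reached through a dedicated approx_algo module, so approx_unique.rs imports it by its full path while the parent mod.rs declares and re-exports the module behind the same approx_unique feature gate. A rough sketch of the resulting layout, simplified and with the feature gates elided:

    // series/ops/mod.rs (simplified)
    mod approx_algo;   // home of HyperLogLog
    mod approx_unique; // consumer
    pub use approx_algo::*;

    // series/ops/approx_unique.rs (simplified)
    use crate::series::ops::approx_algo::HyperLogLog;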
@@ -5,7 +5,6 @@ use arrow::bitmap::MutableBitmap;
use polars_error::PolarsResult;

use super::super::Pages;
pub use super::utils::Zip;
use super::utils::{DecodedState, MaybeNext, PageState};
use crate::parquet::encoding::hybrid_rle::HybridRleDecoder;
use crate::parquet::page::{split_buffer, DataPage, DictPage, Page};
@@ -147,7 +147,7 @@ impl<R: Read + Seek> IndexedPageReader<R> {
fn read_dict(&mut self) -> Option<Result<CompressedPage, Error>> {
// a dictionary page exists iff the first data page is not at the start of
// the column
let (start, length) = match self.pages.get(0) {
let (start, length) = match self.pages.front() {
Some(page) => {
let length = (page.start - self.column_start) as usize;
if length > 0 {
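Unlike the other call sites in this commit, this one becomes .front() rather than .first(), which suggests self.pages is a VecDeque rather than a slice; VecDeque names its head accessor differently. A small illustration with hypothetical values:

    use std::collections::VecDeque;

    fn main() {
        let pages: VecDeque<u64> = VecDeque::from([10, 20, 30]);
        assert_eq!(pages.front(), Some(&10)); // VecDeque has front()/back(), not first()
        let specs = [10u64, 20, 30];
        assert_eq!(specs.first(), Some(&10)); // slices and Vec use first()/last()
    }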
2 changes: 0 additions & 2 deletions crates/polars-parquet/src/parquet/schema/io_thrift/mod.rs
@@ -1,8 +1,6 @@
mod from_thrift;
pub use from_thrift::*;

mod to_thrift;
pub use to_thrift::*;

#[cfg(test)]
mod tests {
@@ -1,7 +1,6 @@
use parquet_format_safe::{BoundaryOrder, ColumnIndex, OffsetIndex, PageLocation};

use crate::parquet::error::{Error, Result};
pub use crate::parquet::metadata::KeyValue;
use crate::parquet::statistics::serialize_statistics;
use crate::parquet::write::page::{is_data_page, PageWriteSpec};

1 change: 0 additions & 1 deletion crates/polars-parquet/src/parquet/write/indexes/write.rs
@@ -8,7 +8,6 @@ use parquet_format_safe::thrift::protocol::TCompactOutputStreamProtocol;

use super::serialize::{serialize_column_index, serialize_offset_index};
use crate::parquet::error::Result;
pub use crate::parquet::metadata::KeyValue;
use crate::parquet::write::page::PageWriteSpec;

pub fn write_column_index<W: Write>(writer: &mut W, pages: &[PageWriteSpec]) -> Result<u64> {
6 changes: 3 additions & 3 deletions crates/polars-parquet/src/parquet/write/row_group.rs
@@ -51,7 +51,7 @@ impl ColumnOffsetsMetadata {

fn compute_num_rows(columns: &[(ColumnChunk, Vec<PageWriteSpec>)]) -> Result<i64> {
columns
.get(0)
.first()
.map(|(_, specs)| {
let mut num_rows = 0;
specs
@@ -101,7 +101,7 @@

// compute row group stats
let file_offset = columns
.get(0)
.first()
.map(|(column_chunk, _)| {
ColumnOffsetsMetadata::from_column_chunk(column_chunk).calc_row_group_file_offset()
})
@@ -167,7 +167,7 @@

// compute row group stats
let file_offset = columns
.get(0)
.first()
.map(|(column_chunk, _)| {
ColumnOffsetsMetadata::from_column_chunk(column_chunk).calc_row_group_file_offset()
})
2 changes: 1 addition & 1 deletion crates/polars-plan/src/logical_plan/projection.rs
@@ -47,7 +47,7 @@ fn rewrite_special_aliases(expr: Expr) -> PolarsResult<Expr> {
Expr::KeepName(expr) => {
let roots = expr_to_leaf_column_names(&expr);
let name = roots
.get(0)
.first()
.expect("expected root column to keep expression name");
Ok(Expr::Alias(expr, name.clone()))
},
2 changes: 1 addition & 1 deletion crates/polars-plan/src/logical_plan/pyarrow.rs
@@ -148,7 +148,7 @@ pub(super) fn predicate_to_pa(
input,
..
} => {
let col = predicate_to_pa(*input.get(0)?, expr_arena, args)?;
let col = predicate_to_pa(*input.first()?, expr_arena, args)?;
let mut args = args;
args.allow_literal_series = true;
let values = predicate_to_pa(*input.get(1)?, expr_arena, args)?;
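Only the index-0 access changes here; input.get(1) stays as it is, because slices have named accessors only for their ends (first and last). A quick check with hypothetical data:

    fn main() {
        let input = ["lhs", "rhs", "extra"];
        assert_eq!(input.first(), input.get(0)); // named accessor exists only for index 0
        assert_eq!(input.get(1), Some(&"rhs")); // later positions still go through get()
        assert_eq!(input.last(), Some(&"extra"));
    }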
10 changes: 5 additions & 5 deletions crates/polars-sql/src/context.rs
@@ -112,7 +112,7 @@ impl SQLContext {
.parse_statements()
.map_err(to_compute_err)?;
polars_ensure!(ast.len() == 1, ComputeError: "One and only one statement at a time please");
let res = self.execute_statement(ast.get(0).unwrap());
let res = self.execute_statement(ast.first().unwrap());
// Every execution should clear the CTE map.
self.cte_map.borrow_mut().clear();
self.aliases.borrow_mut().clear();
@@ -337,7 +337,7 @@ impl SQLContext {
// Implicit joins require some more work in query parsers, explicit joins are preferred for now.
let sql_tbl: &TableWithJoins = select_stmt
.from
.get(0)
.first()
.ok_or_else(|| polars_err!(ComputeError: "no table name provided in query"))?;

let mut lf = self.execute_from_statement(sql_tbl)?;
@@ -548,7 +548,7 @@ impl SQLContext {
..
} = stmt
{
let tbl_name = name.0.get(0).unwrap().value.as_str();
let tbl_name = name.0.first().unwrap().value.as_str();
// CREATE TABLE IF NOT EXISTS
if *if_not_exists && self.table_map.contains_key(tbl_name) {
polars_bail!(ComputeError: "relation {} already exists", tbl_name);
@@ -579,7 +579,7 @@ impl SQLContext {
if let Some(args) = args {
return self.execute_tbl_function(name, alias, args);
}
let tbl_name = name.0.get(0).unwrap().value.as_str();
let tbl_name = name.0.first().unwrap().value.as_str();
if let Some(lf) = self.get_table_from_current_scope(tbl_name) {
match alias {
Some(alias) => {
@@ -605,7 +605,7 @@ impl SQLContext {
alias: &Option<TableAlias>,
args: &[FunctionArg],
) -> PolarsResult<(String, LazyFrame)> {
let tbl_fn = name.0.get(0).unwrap().value.as_str();
let tbl_fn = name.0.first().unwrap().value.as_str();
let read_fn = tbl_fn.parse::<PolarsTableFunctions>()?;
let (tbl_name, lf) = read_fn.execute(args)?;
let tbl_name = alias
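In the SQLContext hunks above the same rewrite is applied to the parts of a parsed table name; behaviour is unchanged, since .first().unwrap() panics on an empty collection exactly as .get(0).unwrap() did. A minimal illustration with a hypothetical name:

    fn main() {
        let name_parts = vec!["my_table".to_string()];
        let tbl_name = name_parts.first().unwrap().as_str(); // same panic semantics as get(0).unwrap()
        assert_eq!(tbl_name, "my_table");
    }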
2 changes: 0 additions & 2 deletions crates/polars-time/src/prelude.rs
@@ -1,5 +1,3 @@
pub use date_range::*;

pub use crate::chunkedarray::*;
pub use crate::series::TemporalMethods;
pub use crate::windows::bounds::*;
2 changes: 1 addition & 1 deletion py-polars/src/arrow_interop/to_rust.rs
@@ -48,7 +48,7 @@ pub fn array_to_rust(obj: &PyAny) -> PyResult<ArrayRef> {

pub fn to_rust_df(rb: &[&PyAny]) -> PyResult<DataFrame> {
let schema = rb
.get(0)
.first()
.ok_or_else(|| PyPolarsErr::Other("empty table".into()))?
.getattr("schema")?;
let names = schema.getattr("names")?.extract::<Vec<String>>()?;
2 changes: 1 addition & 1 deletion py-polars/src/lazyframe.rs
@@ -273,7 +273,7 @@ impl PyLazyFrame {
path
} else {
paths
.get(0)
.first()
.ok_or_else(|| PyValueError::new_err("expected a path argument"))?
};

2 changes: 1 addition & 1 deletion rust-toolchain.toml
@@ -1,2 +1,2 @@
[toolchain]
channel = "nightly-2023-10-12"
channel = "nightly-2023-11-15"
