Commit

fix lint issues
shehabgamin committed Jan 28, 2025
1 parent 17df1bc commit 97c7db0
Showing 2 changed files with 3 additions and 51 deletions.
6 changes: 3 additions & 3 deletions datafusion/expr/src/type_coercion/functions.rs
@@ -387,8 +387,8 @@ fn get_valid_types(
 
         // We need to find the coerced base type, mainly for cases like:
         // `array_append(List(null), i64)` -> `List(i64)`
-        let array_base_type = datafusion_common::utils::base_type(array_type);
-        let elem_base_type = datafusion_common::utils::base_type(elem_type);
+        let array_base_type = base_type(array_type);
+        let elem_base_type = base_type(elem_type);
         let new_base_type = comparison_coercion(&array_base_type, &elem_base_type);
 
         let new_base_type = new_base_type.ok_or_else(|| {
@@ -892,7 +892,7 @@ fn coerced_from<'a>(
         // Only accept list and largelist with the same number of dimensions unless the type is Null.
         // List or LargeList with different dimensions should be handled in TypeSignature or other places before this
         (List(_) | LargeList(_), _)
-            if datafusion_common::utils::base_type(type_from).eq(&Null)
+            if base_type(type_from).eq(&Null)
                 || list_ndims(type_from) == list_ndims(type_into) =>
         {
            Some(type_into.clone())
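For context on the comment about coerced base types in the first hunk, here is a minimal standalone sketch (not part of this commit) of what that coercion step computes for the `array_append(List(null), i64)` -> `List(i64)` case. It assumes the `datafusion_common::utils::base_type` and `datafusion_expr::type_coercion::binary::comparison_coercion` functions with their usual signatures; the commit itself only drops the redundant path qualification and does not change this behavior.

use std::sync::Arc;

use arrow::datatypes::{DataType, Field};
use datafusion_common::utils::base_type;
use datafusion_expr::type_coercion::binary::comparison_coercion;

fn main() {
    // An array whose element type is still unknown (Null) ...
    let array_type = DataType::List(Arc::new(Field::new("item", DataType::Null, true)));
    // ... and the element we want to append to it.
    let elem_type = DataType::Int64;

    // Strip the list nesting so only the innermost (base) types are compared.
    let array_base_type = base_type(&array_type); // DataType::Null
    let elem_base_type = base_type(&elem_type); // DataType::Int64

    // comparison_coercion picks a common type; Null coerces to the non-null side,
    // so the list's element type resolves to Int64.
    let new_base_type = comparison_coercion(&array_base_type, &elem_base_type);
    assert_eq!(new_base_type, Some(DataType::Int64));
}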
48 changes: 0 additions & 48 deletions datafusion/optimizer/src/analyzer/type_coercion.rs
@@ -1012,7 +1012,6 @@ fn project_with_column_index(
 
 #[cfg(test)]
 mod test {
-    use arrow::array::{ArrayRef, Int32Array, RecordBatch, StringArray};
     use arrow::datatypes::DataType::Utf8;
     use arrow::datatypes::{DataType, Field, TimeUnit};
     use std::any::Any;
@@ -1025,7 +1024,6 @@ mod test {
     use datafusion_common::config::ConfigOptions;
     use datafusion_common::tree_node::{TransformedResult, TreeNode};
     use datafusion_common::{DFSchema, DFSchemaRef, Result, ScalarValue};
-    use datafusion_expr::execution_props::ExecutionProps;
     use datafusion_expr::expr::{self, InSubquery, Like, ScalarFunction};
     use datafusion_expr::logical_plan::{EmptyRelation, Projection, Sort};
     use datafusion_expr::test::function_stub::avg_udaf;
@@ -1035,9 +1033,7 @@
         Operator, ScalarUDF, ScalarUDFImpl, Signature, SimpleAggregateUDF, Subquery,
         Volatility,
     };
-    use datafusion_functions::expr_fn::ascii;
     use datafusion_functions_aggregate::average::AvgAccumulator;
-    use datafusion_physical_expr::PhysicalExpr;
 
     fn empty() -> Arc<LogicalPlan> {
         Arc::new(LogicalPlan::EmptyRelation(EmptyRelation {
@@ -2133,48 +2129,4 @@
         assert_analyzed_plan_eq(Arc::new(TypeCoercion::new()), plan, expected)?;
         Ok(())
     }
-
-    /// Create a [`PhysicalExpr`] from an [`Expr`] after applying type coercion.
-    fn create_physical_expr_with_type_coercion(
-        expr: Expr,
-        df_schema: &DFSchema,
-    ) -> Result<Arc<dyn PhysicalExpr>> {
-        let props = ExecutionProps::default();
-        let coerced_expr = expr
-            .rewrite(&mut TypeCoercionRewriter::new(df_schema))?
-            .data;
-        let physical_expr = datafusion_physical_expr::create_physical_expr(
-            &coerced_expr,
-            df_schema,
-            &props,
-        )?;
-        Ok(physical_expr)
-    }
-
-    fn evaluate_expr_with_array(
-        expr: Expr,
-        batch: RecordBatch,
-        df_schema: &DFSchema,
-    ) -> Result<ArrayRef> {
-        let physical_expr = create_physical_expr_with_type_coercion(expr, df_schema)?;
-        match physical_expr.evaluate(&batch)? {
-            ColumnarValue::Array(result) => Ok(result),
-            _ => datafusion_common::internal_err!(
-                "Expected array result in evaluate_expr_with_array"
-            ),
-        }
-    }
-
-    fn evaluate_expr_with_scalar(expr: Expr) -> Result<ScalarValue> {
-        let df_schema = DFSchema::empty();
-        let physical_expr = create_physical_expr_with_type_coercion(expr, &df_schema)?;
-        match physical_expr
-            .evaluate(&RecordBatch::new_empty(Arc::clone(df_schema.inner())))?
-        {
-            ColumnarValue::Scalar(result) => Ok(result),
-            _ => datafusion_common::internal_err!(
-                "Expected scalar result in evaluate_expr_with_scalar"
-            ),
-        }
-    }
 }
