
Commit

edit err msg and type
FBruzzesi committed Jul 15, 2024
1 parent 6b64120 commit f277ef5
Showing 3 changed files with 17 additions and 25 deletions.
14 changes: 5 additions & 9 deletions narwhals/_arrow/group_by.py
@@ -72,15 +72,11 @@ def agg_arrow(
     simple_aggregations: dict[str, tuple[str, str]] = {}
     for expr in exprs:
         # e.g. agg(nw.mean('a')) # noqa: ERA001
-        if expr._depth != 1:  # pragma: no cover
-            msg = f"Expr._depth should be 1, found {expr._depth}"
-            raise ValueError(msg)
-        if expr._root_names is None:  # pragma: no cover
-            msg = "`Expr._root_names` cannot be None"
-            raise ValueError(msg)
-        if expr._output_names is None:  # pragma: no cover
-            msg = "`Expr._output_names` cannot be None"
-            raise ValueError(msg)
+        if (
+            expr._depth != 1 or expr._root_names is None or expr._output_names is None
+        ):  # pragma: no cover
+            msg = "Safety assertion failed, please report a bug to https://github.com/narwhals-dev/narwhals/issues"
+            raise AssertionError(msg)
 
         function_name = remove_prefix(expr._function_name, "col->")
         function_name = POLARS_TO_ARROW_AGGREGATIONS.get(function_name, function_name)
8 changes: 4 additions & 4 deletions narwhals/_expression_parsing.py
@@ -192,8 +192,8 @@ def func(df: CompliantDataFrame) -> list[CompliantSeries]:
         if expr._output_names is not None and (
             [s.name for s in out] != expr._output_names
         ):  # pragma: no cover
-            msg = "Found invalid series name"
-            raise ValueError(msg)
+            msg = "Safety assertion failed, please report a bug to https://github.com/narwhals-dev/narwhals/issues"
+            raise AssertionError(msg)
         return out
 
     # Try tracking root and output names by combining them from all
@@ -218,8 +218,8 @@ def func(df: CompliantDataFrame) -> list[CompliantSeries]:
         (output_names is None and root_names is None)
         or (output_names is not None and root_names is not None)
     ):  # pragma: no cover
-        msg = "output_names and root_names are incompatible"
-        raise ValueError(msg)
+        msg = "Safety assertion failed, please report a bug to https://github.com/narwhals-dev/narwhals/issues"
+        raise AssertionError(msg)
 
     return plx._create_expr_from_callable(  # type: ignore[return-value]
         func,  # type: ignore[arg-type]
20 changes: 8 additions & 12 deletions narwhals/_pandas_like/group_by.py
@@ -91,7 +91,7 @@ def __iter__(self) -> Iterator[tuple[Any, PandasLikeDataFrame]]:
         )
 
 
-def agg_pandas(  # noqa: PLR0915
+def agg_pandas(
     grouped: Any,
     exprs: list[PandasLikeExpr],
     keys: list[str],
@@ -120,8 +120,8 @@ def agg_pandas(  # noqa: PLR0915
         if expr._depth == 0:
             # e.g. agg(nw.len()) # noqa: ERA001
             if expr._output_names is None:  # pragma: no cover
-                msg = "`Expr._output_names` cannot be None"
-                raise ValueError(msg)
+                msg = "Safety assertion failed, please report a bug to https://github.com/narwhals-dev/narwhals/issues"
+                raise AssertionError(msg)
 
             function_name = POLARS_TO_PANDAS_AGGREGATIONS.get(
                 expr._function_name, expr._function_name
@@ -131,15 +131,11 @@ def agg_pandas(  # noqa: PLR0915
             continue
 
         # e.g. agg(nw.mean('a')) # noqa: ERA001
-        if expr._depth != 1:  # pragma: no cover
-            msg = f"Expr._depth should be 1, found {expr._depth}"
-            raise ValueError(msg)
-        if expr._root_names is None:  # pragma: no cover
-            msg = "`Expr._root_names` cannot be None"
-            raise ValueError(msg)
-        if expr._output_names is None:  # pragma: no cover
-            msg = "`Expr._output_names` cannot be None"
-            raise ValueError(msg)
+        if (
+            expr._depth != 1 or expr._root_names is None or expr._output_names is None
+        ):  # pragma: no cover
+            msg = "Safety assertion failed, please report a bug to https://github.com/narwhals-dev/narwhals/issues"
+            raise AssertionError(msg)
 
         function_name = remove_prefix(expr._function_name, "col->")
         function_name = POLARS_TO_PANDAS_AGGREGATIONS.get(
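
All three files converge on the same pattern: separate per-condition `ValueError` checks are collapsed into one combined check that raises `AssertionError` and points users at the issue tracker, since a failure here signals an internal invariant violation rather than bad user input. Below is a minimal, self-contained sketch of that pattern; the simplified `Expr` dataclass and the `check_simple_aggregation` helper are hypothetical, for illustration only, and are not the narwhals API.

from __future__ import annotations

from dataclasses import dataclass


@dataclass
class Expr:
    # Only the attributes the safety check inspects; the real narwhals
    # expression classes carry much more state.
    _depth: int
    _function_name: str
    _root_names: list[str] | None
    _output_names: list[str] | None


def check_simple_aggregation(expr: Expr) -> None:
    # One combined invariant check instead of three separate ValueError
    # branches; AssertionError marks this as an internal bug, not user error.
    if (
        expr._depth != 1 or expr._root_names is None or expr._output_names is None
    ):  # pragma: no cover
        msg = "Safety assertion failed, please report a bug to https://github.com/narwhals-dev/narwhals/issues"
        raise AssertionError(msg)


# A well-formed single-column aggregation passes silently; setting
# _depth=2 or _root_names=None would trip the assertion instead.
check_simple_aggregation(
    Expr(_depth=1, _function_name="col->mean", _root_names=["a"], _output_names=["a"])
)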
