From fd24a5e3ec15bcca09df545dabf8af4fdb445a3a Mon Sep 17 00:00:00 2001
From: Cameron Riddell
Date: Fri, 10 Jan 2025 11:21:29 -0800
Subject: [PATCH] fix root names in pyspark reduction with nw.all()

---
 narwhals/_spark_like/expr.py           | 4 +++-
 tests/expr_and_series/n_unique_test.py | 2 +-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/narwhals/_spark_like/expr.py b/narwhals/_spark_like/expr.py
index efd3975ff..b6d6df914 100644
--- a/narwhals/_spark_like/expr.py
+++ b/narwhals/_spark_like/expr.py
@@ -102,7 +102,9 @@ def func(df: SparkLikeLazyFrame) -> list[Column]:
             for _input in inputs:
                 input_col_name = get_column_name(df, _input)
                 column_result = call(_input, **_kwargs)
-                if not returns_scalar:
+                if not returns_scalar or (
+                    (self._depth == 0) and (self._function_name == "all")
+                ):
                     column_result = column_result.alias(input_col_name)
                 results.append(column_result)
             return results
diff --git a/tests/expr_and_series/n_unique_test.py b/tests/expr_and_series/n_unique_test.py
index 1bcbe89fd..90bffb04b 100644
--- a/tests/expr_and_series/n_unique_test.py
+++ b/tests/expr_and_series/n_unique_test.py
@@ -13,7 +13,7 @@ def test_n_unique(constructor: Constructor) -> None:
     df = nw.from_native(constructor(data))
-    result = df.select(nw.col("a", "b").n_unique())
+    result = df.select(nw.all().n_unique())
     expected = {"a": [3], "b": [4]}
     assert_equal_data(result, expected)
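
Note: below is a minimal standalone sketch of the behavior this patch fixes. It assumes a pandas-backed frame purely so the example runs without a Spark session (the patch itself targets the PySpark backend, where reductions over nw.all() previously lost their root column names), and the sample data is illustrative, chosen so the counts line up with the test's expected output.

# Minimal sketch (assumptions: pandas backend used only to keep the example
# self-contained; `data` is illustrative, picked so the unique counts match
# the test's expected output of {"a": [3], "b": [4]}).
import pandas as pd

import narwhals as nw

data = {"a": [1.0, None, None, 3.0], "b": [1.0, None, 4.0, 5.0]}
df = nw.from_native(pd.DataFrame(data))

# nw.all() expands to one expression per column and n_unique() reduces each
# column to a single value; the result should keep the root column names
# "a" and "b", which is what the expr.py change restores for PySpark.
result = df.select(nw.all().n_unique())
print(result.to_native())
# Nulls count as a distinct value (Polars semantics), so "a" -> 3, "b" -> 4.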