Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/main' into test-pyspark
Browse files Browse the repository at this point in the history
  • Loading branch information
MarcoGorelli committed Jan 9, 2025
2 parents e4c8281 + 0f38521 commit d56b995
Show file tree
Hide file tree
Showing 3 changed files with 104 additions and 8 deletions.
2 changes: 1 addition & 1 deletion narwhals/_spark_like/dataframe.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def __native_namespace__(self) -> Any: # pragma: no cover
def __narwhals_namespace__(self) -> SparkLikeNamespace:
    """Return the SparkLike namespace object for this frame.

    Imported locally to avoid a circular import between the dataframe
    and namespace modules.
    """
    from narwhals._spark_like.namespace import SparkLikeNamespace

    # The `type: ignore[abstract]` escape hatch is no longer needed now
    # that SparkLikeNamespace is concrete.
    return SparkLikeNamespace(
        backend_version=self._backend_version, version=self._version
    )

Expand Down
87 changes: 80 additions & 7 deletions narwhals/_spark_like/expr.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def __narwhals_namespace__(self) -> SparkLikeNamespace: # pragma: no cover
# Unused, just for compatibility with PandasLikeExpr
from narwhals._spark_like.namespace import SparkLikeNamespace

return SparkLikeNamespace( # type: ignore[abstract]
return SparkLikeNamespace(
backend_version=self._backend_version, version=self._version
)

Expand Down Expand Up @@ -139,32 +139,66 @@ def __ne__(self, other: SparkLikeExpr) -> Self: # type: ignore[override]

def __add__(self, other: SparkLikeExpr) -> Self:
    """Elementwise addition of this expression and `other`.

    Delegates to `Column.__add__` via `_from_call`; the result is a new
    (non-scalar) expression named "__add__".
    """
    # Only the dunder-call form is kept; the span previously carried both
    # the old operator lambda and this one, which would have passed two
    # positional callables to `_from_call`.
    return self._from_call(
        lambda _input, other: _input.__add__(other),
        "__add__",
        other=other,
        returns_scalar=False,
    )

def __sub__(self, other: SparkLikeExpr) -> Self:
    """Elementwise subtraction of `other` from this expression.

    Delegates to `Column.__sub__` via `_from_call`; the result is a new
    (non-scalar) expression named "__sub__".
    """
    return self._from_call(
        lambda _input, other: _input.__sub__(other),
        "__sub__",
        other=other,
        returns_scalar=False,
    )

def __mul__(self, other: SparkLikeExpr) -> Self:
    """Elementwise multiplication of this expression and `other`.

    Delegates to `Column.__mul__` via `_from_call`; the result is a new
    (non-scalar) expression named "__mul__".
    """
    return self._from_call(
        lambda _input, other: _input.__mul__(other),
        "__mul__",
        other=other,
        returns_scalar=False,
    )

def __truediv__(self, other: SparkLikeExpr) -> Self:
    """Elementwise true division of this expression by `other`.

    Delegates to `Column.__truediv__` via `_from_call`; the result is a
    new (non-scalar) expression named "__truediv__".
    """
    # The old `__lt__` definition that was interleaved here (diff residue)
    # is dropped; `__lt__` is defined separately below.
    return self._from_call(
        lambda _input, other: _input.__truediv__(other),
        "__truediv__",
        other=other,
        returns_scalar=False,
    )

def __floordiv__(self, other: SparkLikeExpr) -> Self:
    """Elementwise floor division (`//`) of this expression by `other`.

    PySpark has no floor-division operator on `Column`, so this divides
    and then applies `F.floor`.
    """

    def _floordiv(_input: Column, other: Column) -> Column:
        import pyspark.sql.functions as F  # noqa: N812

        return F.floor(_input.__truediv__(other))

    return self._from_call(
        _floordiv,
        "__floordiv__",
        other=other,
        returns_scalar=False,
    )

def __pow__(self, other: SparkLikeExpr) -> Self:
    """Elementwise exponentiation of this expression by `other`."""

    def _pow(_input: Column, other: Column) -> Column:
        return _input.__pow__(other)

    return self._from_call(_pow, "__pow__", other=other, returns_scalar=False)

def __mod__(self, other: SparkLikeExpr) -> Self:
    """Elementwise modulo of this expression by `other`."""

    def _mod(_input: Column, other: Column) -> Column:
        return _input.__mod__(other)

    return self._from_call(_mod, "__mod__", other=other, returns_scalar=False)

def __ge__(self, other: SparkLikeExpr) -> Self:
    """Elementwise `>=` comparison with `other`."""

    def _ge(_input: Column, other: Column) -> Column:
        return _input.__ge__(other)

    return self._from_call(_ge, "__ge__", other=other, returns_scalar=False)
Expand All @@ -177,6 +211,45 @@ def __gt__(self, other: SparkLikeExpr) -> Self:
returns_scalar=False,
)

def __le__(self, other: SparkLikeExpr) -> Self:
    """Elementwise `<=` comparison with `other`."""

    def _le(_input: Column, other: Column) -> Column:
        return _input.__le__(other)

    return self._from_call(_le, "__le__", other=other, returns_scalar=False)

def __lt__(self, other: SparkLikeExpr) -> Self:
    """Elementwise `<` comparison with `other`."""

    def _lt(_input: Column, other: Column) -> Column:
        return _input.__lt__(other)

    return self._from_call(_lt, "__lt__", other=other, returns_scalar=False)

def __and__(self, other: SparkLikeExpr) -> Self:
    """Elementwise boolean AND with `other`."""

    def _and(_input: Column, other: Column) -> Column:
        return _input.__and__(other)

    return self._from_call(_and, "__and__", other=other, returns_scalar=False)

def __or__(self, other: SparkLikeExpr) -> Self:
    """Elementwise boolean OR with `other`."""

    def _or(_input: Column, other: Column) -> Column:
        return _input.__or__(other)

    return self._from_call(_or, "__or__", other=other, returns_scalar=False)

def __invert__(self) -> Self:
    """Elementwise boolean negation (`~`) of this expression.

    Unlike the binary operators, scalar-ness is inherited from this
    expression rather than forced to False.
    """

    def _invert(_input: Column) -> Column:
        return _input.__invert__()

    return self._from_call(
        _invert, "__invert__", returns_scalar=self._returns_scalar
    )

def abs(self) -> Self:
from pyspark.sql import functions as F # noqa: N812

Expand Down
23 changes: 23 additions & 0 deletions narwhals/_spark_like/namespace.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

from narwhals._spark_like.dataframe import SparkLikeLazyFrame
from narwhals._spark_like.typing import IntoSparkLikeExpr
from narwhals.dtypes import DType
from narwhals.utils import Version


Expand Down Expand Up @@ -67,6 +68,28 @@ def col(self, *column_names: str) -> SparkLikeExpr:
*column_names, backend_version=self._backend_version, version=self._version
)

def lit(self, value: object, dtype: DType | None) -> SparkLikeExpr:
    """Return an expression producing a literal column aliased "literal".

    Passing an explicit `dtype` is not implemented yet for the Spark-like
    backend and raises `NotImplementedError`.
    """
    if dtype is not None:
        msg = "todo"
        raise NotImplementedError(msg)

    def _lit(df: SparkLikeLazyFrame) -> list[Column]:
        # The frame is unused: a literal does not depend on any input column.
        from pyspark.sql import functions as F  # noqa: N812

        return [F.lit(value).alias("literal")]

    return SparkLikeExpr(  # type: ignore[abstract]
        call=_lit,
        depth=0,
        function_name="lit",
        root_names=None,
        output_names=["literal"],
        returns_scalar=True,
        backend_version=self._backend_version,
        version=self._version,
        kwargs={},
    )

def sum_horizontal(self, *exprs: IntoSparkLikeExpr) -> SparkLikeExpr:
parsed_exprs = parse_into_exprs(*exprs, namespace=self)

Expand Down

0 comments on commit d56b995

Please sign in to comment.