Commit 0956d1c
add no covers + change qc dispatch
sfc-gh-joshi committed Aug 30, 2024
1 parent c42b4d0 commit 0956d1c
Showing 1 changed file with 4 additions and 4 deletions.
@@ -2072,7 +2072,7 @@ def abs(self):  # noqa: RT01, D200
     Return a `BasePandasDataset` with absolute numeric value of each element.
     """
     # TODO: SNOW-1119855: Modin upgrade - modin.pandas.base.BasePandasDataset
-    return self.__constructor__(query_compiler=self._query_compiler.unary_op("abs"))
+    return self.__constructor__(query_compiler=self._query_compiler.abs())


 # Modin does dtype validation on unary ops that Snowpark pandas does not.
@@ -2101,7 +2101,7 @@ def __neg__(self):
     BasePandasDataset
     """
     # TODO: SNOW-1119855: Modin upgrade - modin.pandas.base.BasePandasDataset
-    return self.__constructor__(query_compiler=self._query_compiler.unary_op("__neg__"))
+    return self.__constructor__(query_compiler=self._query_compiler.negative())


 # Modin needs to add a check for mapper is not None, which changes query counts in test_concat.py
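
The two hunks above route abs and __neg__ through dedicated query compiler
methods (abs() and negative()) instead of the string-keyed unary_op dispatch,
so a backend can override each operation directly. Below is a minimal sketch
of the difference; SketchQueryCompiler is invented for illustration and is
not the Snowpark pandas query compiler API.

import operator

class SketchQueryCompiler:
    """Toy stand-in for a query compiler; not the real interface."""

    def __init__(self, values):
        self.values = list(values)

    def unary_op(self, op_name):
        # Old style: resolve the operation from a string key at call time.
        op = {"abs": operator.abs, "__neg__": operator.neg}[op_name]
        return SketchQueryCompiler(op(v) for v in self.values)

    def abs(self):
        # New style: a named method a backend can implement or override
        # directly, with no string lookup in between.
        return SketchQueryCompiler(operator.abs(v) for v in self.values)

    def negative(self):
        return SketchQueryCompiler(operator.neg(v) for v in self.values)

qc = SketchQueryCompiler([-2, 3, -5])
assert qc.unary_op("abs").values == qc.abs().values == [2, 3, 5]
assert qc.negative().values == [2, -3, 5]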
@@ -2199,7 +2199,7 @@ def __array_ufunc__(self, ufunc: np.ufunc, method: str, *inputs, **kwargs):
     # Use pandas version of ufunc if it exists
     if method != "__call__":
         # Return sentinel value NotImplemented
-        return NotImplemented
+        return NotImplemented  # pragma: no cover
     from snowflake.snowpark.modin.plugin.utils.numpy_to_pandas import (
         numpy_to_pandas_universal_func_map,
     )
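
For context, __array_ufunc__ is NumPy's override protocol: NumPy calls it
whenever a ufunc such as np.abs receives the object, and returning the
sentinel NotImplemented tells NumPy the object does not handle that ufunc or
method. A self-contained sketch of the pattern used above; the Wrapped class
and its handler table are invented here, not numpy_to_pandas_universal_func_map.

import numpy as np

class Wrapped:
    """Toy container demonstrating the __array_ufunc__ protocol."""

    def __init__(self, values):
        self.values = np.asarray(values)

    def __array_ufunc__(self, ufunc, method, *inputs, **kwargs):
        if method != "__call__":
            # Reductions, accumulations, etc. are unsupported here;
            # NotImplemented lets NumPy try other handlers or raise.
            return NotImplemented
        handlers = {"absolute": lambda obj: Wrapped(np.absolute(obj.values))}
        if ufunc.__name__ in handlers:
            return handlers[ufunc.__name__](self)
        return NotImplemented

w = Wrapped([-1, 2, -3])
print(np.abs(w).values)  # [1 2 3]; np.abs resolves to the "absolute" ufunc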
@@ -2208,7 +2208,7 @@ def __array_ufunc__(self, ufunc: np.ufunc, method: str, *inputs, **kwargs):
         ufunc = numpy_to_pandas_universal_func_map[ufunc.__name__]
         return ufunc(self, inputs[1:], kwargs)
     # return the sentinel NotImplemented if we do not support this function
-    return NotImplemented
+    return NotImplemented  # pragma: no cover


 # Snowpark pandas does extra argument validation.
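
The two "# pragma: no cover" additions are the "no covers" from the commit
title: coverage.py's default exclude_lines configuration matches that comment
and omits the tagged lines from coverage reports, which suits fallback
returns the test suite is not expected to reach. A small hypothetical example:

def halve(n):
    if n % 2 == 0:
        return n // 2
    # Defensive fallback the tests never exercise; excluding it keeps
    # the coverage report clean without writing an artificial test.
    raise ValueError("odd input")  # pragma: no cover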
