-
Notifications
You must be signed in to change notification settings - Fork 2
Adding array-api-compat fallback #159
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Large diffs are not rendered by default.
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -27,6 +27,13 @@ def to_dense_( | |
| order: Literal["K", "A", "C", "F"] = "K", | ||
| to_cpu_memory: bool = False, | ||
| ) -> NDArray[Any] | types.CupyArray | types.DaskArray: | ||
| import array_api_compat | ||
|
|
||
| if not isinstance(x, np.ndarray) and array_api_compat.is_array_api_obj(x): | ||
| if to_cpu_memory: | ||
| return np.asarray(x, order=order) | ||
| return x # already dense | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I don’t think that follows, but I also don’t know if we can do better. |
||
|
|
||
| del to_cpu_memory # it already is | ||
| return np.asarray(x, order=order) | ||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -42,13 +42,20 @@ def generic_op( | |
| axis: Literal[0, 1] | None = None, | ||
| dtype: DTypeLike | None = None, | ||
| keep_cupy_as_array: bool = False, | ||
| ) -> NDArray[Any] | np.number[Any] | types.CupyArray | types.DaskArray: | ||
| ) -> NDArray[Any] | np.number[Any] | types.CupyArray | types.DaskArray: # switch to Any later | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. What does the comment mean? |
||
| del keep_cupy_as_array | ||
| if TYPE_CHECKING: | ||
| # these are never passed to this fallback function, but `singledispatch` wants them | ||
| assert not isinstance(x, types.CSBase | types.DaskArray | types.CupyArray | types.CupyCSMatrix) | ||
| # np supports these, but doesn’t know it. (TODO: test cupy) | ||
| assert not isinstance(x, types.ZarrArray | types.H5Dataset) | ||
|
|
||
| # doing array_api_compat first | ||
| import array_api_compat | ||
|
|
||
| if array_api_compat.is_array_api_obj(x): | ||
| xp = array_api_compat.array_namespace(x) | ||
| return getattr(xp, op)(x, axis=axis, **_dtype_kw(dtype, op)) | ||
|
Comment on lines
+53
to
+58
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. The code inside of |
||
| return cast("NDArray[Any] | np.number[Any]", _run_numpy_op(x, op, axis=axis, dtype=dtype)) | ||
|
|
||
|
|
||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,121 @@ | ||
| # SPDX-License-Identifier: MPL-2.0 | ||
| from __future__ import annotations | ||
|
|
||
| from importlib.util import find_spec | ||
| from typing import TYPE_CHECKING | ||
|
|
||
| import numpy as np | ||
| import pytest | ||
|
|
||
| from fast_array_utils import stats | ||
| from fast_array_utils.conv import to_dense | ||
|
|
||
|
|
||
if TYPE_CHECKING:
    from typing import Any, Literal


# Skip every test in this module when jax is not installed.
pytestmark = pytest.mark.skipif(not find_spec("jax"), reason="jax not installed")

if find_spec("jax"):
    # JAX defaults to 32-bit precision only; mean_var passes
    # dtype=np.float64 internally, which crashes without enabling x64.
    import jax

    jax.config.update("jax_enable_x64", True)
|
|
||
|
|
||
@pytest.fixture
def jax_arr() -> Any:
    """Return a small float32 JAX array whose second column is all zeros."""
    import jax.numpy as jnp

    rows = [(1, 0), (2, 0), (3, 0)]
    return jnp.array(rows, dtype=jnp.float32)
|
|
||
|
|
||
@pytest.mark.parametrize("axis", [None, 0, 1])
def test_sum(jax_arr: Any, axis: Literal[0, 1] | None) -> None:
    """stats.sum on a JAX array must agree with jnp.sum for every axis."""
    import jax.numpy as jnp

    via_stats = stats.sum(jax_arr, axis=axis)
    via_jnp = jnp.sum(jax_arr, axis=axis)
    assert jnp.array_equal(via_stats, via_jnp)
|
|
||
|
|
||
@pytest.mark.parametrize("axis", [None, 0, 1])
def test_min(jax_arr: Any, axis: Literal[0, 1] | None) -> None:
    """stats.min on a JAX array must agree with jnp.min for every axis."""
    import jax.numpy as jnp

    via_stats = stats.min(jax_arr, axis=axis)
    via_jnp = jnp.min(jax_arr, axis=axis)
    assert jnp.array_equal(via_stats, via_jnp)
|
|
||
|
|
||
@pytest.mark.parametrize("axis", [None, 0, 1])
def test_max(jax_arr: Any, axis: Literal[0, 1] | None) -> None:
    """stats.max on a JAX array must agree with jnp.max for every axis."""
    import jax.numpy as jnp

    via_stats = stats.max(jax_arr, axis=axis)
    via_jnp = jnp.max(jax_arr, axis=axis)
    assert jnp.array_equal(via_stats, via_jnp)
|
|
||
|
|
||
@pytest.mark.parametrize("axis", [None, 0, 1])
def test_mean(jax_arr: Any, axis: Literal[0, 1] | None) -> None:
    """stats.mean on a JAX array must agree with jnp.mean for every axis."""
    import jax.numpy as jnp

    via_stats = stats.mean(jax_arr, axis=axis)
    via_jnp = jnp.mean(jax_arr, axis=axis)
    # allclose, not array_equal: mean involves floating-point division.
    assert jnp.allclose(via_stats, via_jnp)
|
|
||
|
|
||
@pytest.mark.parametrize("axis", [None, 0, 1])
def test_is_constant(jax_arr: Any = None, axis: Literal[0, 1] | None = None) -> None:
    """is_constant must identify constant columns/rows of a JAX array."""
    import jax.numpy as jnp

    rows = [
        [0, 0, 1, 1],
        [0, 0, 1, 1],
        [0, 0, 0, 0],
        [0, 0, 0, 0],
        [0, 0, 1, 0],
        [0, 0, 0, 0],
    ]
    got = stats.is_constant(jnp.array(rows, dtype=jnp.float32), axis=axis)

    if axis is None:
        # The flattened matrix contains both 0 and 1, so it is not constant.
        assert bool(got) is False
        return

    per_axis = {
        0: [True, True, False, False],
        1: [False, False, True, True, False, True],
    }
    assert jnp.array_equal(got, jnp.array(per_axis[axis]))
|
|
||
|
|
||
@pytest.mark.parametrize("axis", [None, 0, 1])
def test_mean_var(jax_arr: Any, axis: Literal[0, 1] | None) -> None:
    """mean_var with correction=1 must match jnp's mean and ddof=1 variance.

    The expected variance is computed with ``jnp.var(..., ddof=1)`` rather
    than applying the Bessel correction by hand (``jnp.var(...) * n / (n - 1)``):
    it states the intent directly and avoids a division-by-zero hazard in the
    expectation when an axis has length 1.
    """
    import jax.numpy as jnp

    mean, var = stats.mean_var(jax_arr, axis=axis, correction=1)

    assert jnp.allclose(mean, jnp.mean(jax_arr, axis=axis))
    # ddof=1 is exactly the `correction=1` (sample variance) convention.
    assert jnp.allclose(var, jnp.var(jax_arr, axis=axis, ddof=1))
|
|
||
|
|
||
def test_to_dense(jax_arr: Any) -> None:
    """to_dense must pass an already-dense JAX array through unchanged."""
    import jax.numpy as jnp

    dense = to_dense(jax_arr)
    assert jnp.array_equal(dense, jax_arr)
|
|
||
|
|
||
def test_to_dense_to_cpu(jax_arr: Any) -> None:
    """With to_cpu_memory=True, to_dense must yield a plain numpy array."""
    dense = to_dense(jax_arr, to_cpu_memory=True)
    assert isinstance(dense, np.ndarray)
    np.testing.assert_array_equal(dense, np.asarray(jax_arr))
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
you accidentally committed this