import numpy as np
import pytest

from pytensor import config
from pytensor.graph import FunctionGraph
from pytensor.tensor import tensor
from pytensor.tensor.blockwise import Blockwise
from pytensor.tensor.math import Dot, matmul
from tests.link.jax.test_basic import compare_jax_and_py
from tests.tensor.test_blockwise import check_blockwise_runtime_broadcasting


jax = pytest.importorskip("jax")


def test_runtime_broadcasting():
    check_blockwise_runtime_broadcasting("JAX")

# Equivalent Blockwise to matmul, but with a deliberately odd signature
odd_matmul = Blockwise(Dot(), signature="(i00,i01),(i10,i11)->(o00,o01)")

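# The signature above uses NumPy's gufunc notation: each comma-separated group
# names the core (non-batched) dimensions of one input or output. Unlike a
# conventional matmul signature such as "(m,k),(k,n)->(m,n)", the labels here
# are all distinct, so the signature encodes no shared-dimension constraints;
# it is a valid but intentionally uninformative way to wrap Dot, and the test
# below is expected to treat odd_matmul and matmul identically.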
@pytest.mark.parametrize("matmul_op", (matmul, odd_matmul))
def test_matmul(matmul_op):
    rng = np.random.default_rng(14)
    a = tensor("a", shape=(2, 3, 5))
    b = tensor("b", shape=(2, 5, 3))
    test_values = [
        rng.normal(size=inp.type.shape).astype(config.floatX) for inp in (a, b)
    ]

    out = matmul_op(a, b)
    assert isinstance(out.owner.op, Blockwise)
    fg = FunctionGraph([a, b], [out])
    fn, _ = compare_jax_and_py(fg, test_values)

    # Check we are not adding any unnecessary operations: the compiled graph
    # should lower to the same jaxpr as a plain jax.numpy.matmul
    jaxpr = str(jax.make_jaxpr(fn.vm.jit_fn)(*test_values))
    jaxpr = jaxpr.replace("name=jax_funcified_fgraph", "name=matmul")
    expected_jaxpr = str(jax.make_jaxpr(jax.jit(jax.numpy.matmul))(*test_values))
    assert jaxpr == expected_jaxpr
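

# A minimal sketch of a direct numerical check (hypothetical extra test, not
# part of the jaxpr comparison above): odd_matmul should agree with np.matmul
# on batched inputs, since Blockwise only applies Dot over the leading batch
# dimension. Uses Variable.eval with the default backend rather than JAX.
def test_odd_matmul_matches_numpy():
    rng = np.random.default_rng(7)
    x = tensor("x", shape=(2, 3, 5))
    y = tensor("y", shape=(2, 5, 3))
    x_val, y_val = (
        rng.normal(size=inp.type.shape).astype(config.floatX) for inp in (x, y)
    )
    res = odd_matmul(x, y).eval({x: x_val, y: y_val})
    np.testing.assert_allclose(res, np.matmul(x_val, y_val), rtol=1e-5)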