|
1 | 1 | import numpy as np
|
2 | 2 | import pytest
|
| 3 | +from scipy.special import factorial as scipy_factorial |
3 | 4 | from scipy.special import log_softmax as scipy_log_softmax
|
| 5 | +from scipy.special import poch as scipy_poch |
4 | 6 | from scipy.special import softmax as scipy_softmax
|
5 | 7 |
|
6 | 8 | from pytensor.compile.function import function
|
|
9 | 11 | LogSoftmax,
|
10 | 12 | Softmax,
|
11 | 13 | SoftmaxGrad,
|
| 14 | + factorial, |
12 | 15 | log_softmax,
|
| 16 | + poch, |
13 | 17 | softmax,
|
14 | 18 | )
|
15 |
| -from pytensor.tensor.type import matrix, tensor3, tensor4, vector |
| 19 | +from pytensor.tensor.type import matrix, tensor3, tensor4, vector, vectors |
16 | 20 | from tests import unittest_tools as utt
|
| 21 | +from tests.tensor.utils import random_ranged |
17 | 22 |
|
18 | 23 |
|
19 | 24 | class TestSoftmax(utt.InferShapeTester):
|
@@ -140,3 +145,29 @@ def test_valid_axis(self):
|
140 | 145 |
|
141 | 146 | with pytest.raises(ValueError):
|
142 | 147 | SoftmaxGrad(-4)(*x)
|
| 148 | + |
| 149 | + |
def test_poch():
    """Check the compiled `poch` Op against `scipy.special.poch` on random inputs."""
    z_var, m_var = vectors("z", "m")
    poch_fn = function([z_var, m_var], poch(z_var, m_var))

    # Random test points in [0, 5); shape (2,) matches the vector inputs.
    z_vals = random_ranged(0, 5, (2,))
    m_vals = random_ranged(0, 5, (2,))

    # Looser tolerance in float32 mode, tight in float64.
    tol = 1e-7 if config.floatX == "float64" else 1e-5
    np.testing.assert_allclose(
        poch_fn(z_vals, m_vals), scipy_poch(z_vals, m_vals), rtol=tol
    )
| 161 | + |
| 162 | + |
@pytest.mark.parametrize("n", random_ranged(0, 5, (1,)))
def test_factorial(n):
    """Check the compiled `factorial` Op against `scipy.special.factorial`.

    `n` is a random scalar in [0, 5) supplied by the parametrize decorator.
    """
    _n = vector("n")
    actual_fn = function([_n], factorial(_n))

    # BUG FIX: the original body rebound `n = random_ranged(0, 5, (2,))`,
    # silently discarding the parametrized value. Use the parametrized `n`,
    # lifted into a length-1 vector of the configured float dtype.
    test_n = np.asarray([n], dtype=config.floatX)
    actual = actual_fn(test_n)
    expected = scipy_factorial(test_n)
    np.testing.assert_allclose(
        actual, expected, rtol=1e-7 if config.floatX == "float64" else 1e-5
    )
0 commit comments