Skip to content

Commit 82c6318

Browse files
jahall (Joseph Hall)
and
Joseph Hall
authored
Add GP Wrapped Periodic Kernel (#6742)
Co-authored-by: Joseph Hall <[email protected]>
1 parent 4a65148 commit 82c6318

File tree

2 files changed

+130
-21
lines changed

2 files changed

+130
-21
lines changed

pymc/gp/cov.py

Lines changed: 92 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,7 @@
4141
"Cosine",
4242
"Periodic",
4343
"WarpedInput",
44+
"WrappedPeriodic",
4445
"Gibbs",
4546
"Coregion",
4647
"ScaledCov",
@@ -551,6 +552,11 @@ def diag(self, X: TensorLike) -> TensorVariable:
551552
return self._alloc(1.0, X.shape[0])
552553

553554
def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
    """Evaluate the full covariance matrix between `X` and `Xs`.

    The stationary kernels only depend on the distance between inputs, so
    the matrix is computed by delegating to ``full_from_distance`` with the
    (squared) pairwise distances.
    """
    X, Xs = self._slice(X, Xs)
    return self.full_from_distance(self.square_dist(X, Xs), squared=True)

def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
    """Evaluate the kernel as a function of distance; implemented by subclasses."""
    raise NotImplementedError
555561

556562
def power_spectral_density(self, omega: TensorLike) -> TensorVariable:
@@ -568,9 +574,8 @@ class ExpQuad(Stationary):
568574
569575
"""
570576

571-
def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
572-
X, Xs = self._slice(X, Xs)
573-
r2 = self.square_dist(X, Xs)
577+
def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
    """Exponentiated-quadratic kernel evaluated from pairwise distances.

    `dist` is interpreted as a squared distance when ``squared=True``,
    otherwise it is squared here.
    """
    if squared:
        r2 = dist
    else:
        r2 = dist**2
    return pt.exp(-0.5 * r2)
575580

576581
def power_spectral_density(self, omega: TensorLike) -> TensorVariable:
@@ -609,9 +614,8 @@ def __init__(
609614
super().__init__(input_dim, ls, ls_inv, active_dims)
610615
self.alpha = alpha
611616

612-
def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
613-
X, Xs = self._slice(X, Xs)
614-
r2 = self.square_dist(X, Xs)
617+
def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
    """Rational-quadratic kernel evaluated from pairwise distances."""
    if squared:
        r2 = dist
    else:
        r2 = dist**2
    # (1 + r^2 / (2*alpha)) ** (-alpha)
    base = 1.0 + 0.5 * r2 * (1.0 / self.alpha)
    return pt.power(base, -1.0 * self.alpha)
@@ -629,9 +633,8 @@ class Matern52(Stationary):
629633
\mathrm{exp}\left[ - \frac{\sqrt{5(x - x')^2}}{\ell} \right]
630634
"""
631635

632-
def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
633-
X, Xs = self._slice(X, Xs)
634-
r = self.euclidean_dist(X, Xs)
636+
def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
    """Matern nu=5/2 kernel evaluated from pairwise distances."""
    if squared:
        r = self._sqrt(dist)
    else:
        r = dist
    sqrt5_r = np.sqrt(5.0) * r
    return (1.0 + sqrt5_r + 5.0 / 3.0 * pt.square(r)) * pt.exp(-1.0 * sqrt5_r)
636639

637640
def power_spectral_density(self, omega: TensorLike) -> TensorVariable:
@@ -669,9 +672,8 @@ class Matern32(Stationary):
669672
\mathrm{exp}\left[ - \frac{\sqrt{3(x - x')^2}}{\ell} \right]
670673
"""
671674

672-
def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
673-
X, Xs = self._slice(X, Xs)
674-
r = self.euclidean_dist(X, Xs)
675+
def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
    """Matern nu=3/2 kernel evaluated from pairwise distances."""
    if squared:
        r = self._sqrt(dist)
    else:
        r = dist
    sqrt3_r = np.sqrt(3.0) * r
    return (1.0 + sqrt3_r) * pt.exp(-sqrt3_r)
676678

677679
def power_spectral_density(self, omega: TensorLike) -> TensorVariable:
@@ -708,9 +710,8 @@ class Matern12(Stationary):
708710
k(x, x') = \mathrm{exp}\left[ -\frac{(x - x')^2}{\ell} \right]
709711
"""
710712

711-
def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
712-
X, Xs = self._slice(X, Xs)
713-
r = self.euclidean_dist(X, Xs)
713+
def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
    """Matern nu=1/2 kernel evaluated from pairwise distances."""
    if squared:
        r = self._sqrt(dist)
    else:
        r = dist
    return pt.exp(-r)
715716

716717

@@ -723,9 +724,8 @@ class Exponential(Stationary):
723724
k(x, x') = \mathrm{exp}\left[ -\frac{||x - x'||}{2\ell} \right]
724725
"""
725726

726-
def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
727-
X, Xs = self._slice(X, Xs)
728-
r = self.euclidean_dist(X, Xs)
727+
def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
    """Exponential kernel evaluated from pairwise distances."""
    if squared:
        r = self._sqrt(dist)
    else:
        r = dist
    return pt.exp(-0.5 * r)
730730

731731

@@ -737,9 +737,8 @@ class Cosine(Stationary):
737737
k(x, x') = \mathrm{cos}\left( 2 \pi \frac{||x - x'||}{ \ell^2} \right)
738738
"""
739739

740-
def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
741-
X, Xs = self._slice(X, Xs)
742-
r = self.euclidean_dist(X, Xs)
740+
def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
    """Cosine kernel evaluated from pairwise distances."""
    if squared:
        r = self._sqrt(dist)
    else:
        r = dist
    return pt.cos(2.0 * np.pi * r)
744743

745744

@@ -781,6 +780,12 @@ def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable
781780
f2 = pt.expand_dims(Xs, axis=(1,))
782781
r = np.pi * (f1 - f2) / self.period
783782
r2 = pt.sum(pt.square(pt.sin(r) / self.ls), 2)
783+
return self.full_from_distance(r2, squared=True)
784+
785+
def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
    """Evaluate the periodic kernel from pre-warped distances.

    NOTE: identical to `ExpQuad.full_from_distance` because the periodic
    warping is assumed to already be folded into `dist`.
    """
    if squared:
        r2 = dist
    else:
        r2 = dist**2
    return pt.exp(-0.5 * r2)
785790

786791

@@ -882,6 +887,72 @@ def diag(self, X: TensorLike) -> TensorVariable:
882887
return self.cov_func(self.w(X, self.args), diag=True)
883888

884889

890+
class WrappedPeriodic(Covariance):
    r"""
    The `WrappedPeriodic` kernel constructs periodic kernels from any `Stationary` kernel.

    This is done by warping the input with the function

    .. math::
        \mathbf{u}(x) = \left(
            \mathrm{sin} \left( \frac{2\pi x}{T} \right),
            \mathrm{cos} \left( \frac{2\pi x}{T} \right)
        \right)

    The original derivation as applied to the squared exponential kernel can
    be found in [1] or referenced in Chapter 4, page 92 of [2].

    Notes
    -----
    Note that we drop the resulting scaling by 4 found in the original derivation
    so that the interpretation of the length scales is consistent between the
    underlying kernel and the periodic version.

    Examples
    --------
    In order to construct a kernel equivalent to the `Periodic` kernel you
    can do the following (though using `Periodic` will likely be a bit faster):

    .. code:: python

        exp_quad = pm.gp.cov.ExpQuad(1, ls=0.5)
        cov = pm.gp.cov.WrappedPeriodic(exp_quad, period=5)

    References
    ----------
    .. [1] MacKay, D. J. C. (1998). Introduction to Gaussian Processes.
       In Bishop, C. M., editor, Neural Networks and Machine Learning. Springer-Verlag
    .. [2] Rasmussen, C. E., & Williams, C. K. I. (2006). Gaussian processes for machine learning. MIT Press.
       http://gaussianprocess.org/gpml/chapters/

    Parameters
    ----------
    cov_func: Stationary
        Base kernel or covariance function
    period:
        The period `T` of the warping function; inputs one period apart are
        mapped to the same warped point.
    """

    def __init__(self, cov_func: Stationary, period):
        # Validate before touching cov_func's attributes so that a bad
        # argument fails with the documented TypeError rather than an
        # AttributeError from the super().__init__ call below.
        if not isinstance(cov_func, Stationary):
            raise TypeError("Must inherit from the Stationary class")
        super().__init__(cov_func.input_dim, cov_func.active_dims)
        self.cov_func = cov_func
        self.period = period

    def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
        """Evaluate the covariance matrix by warping the inputs periodically.

        Computes the periodic squared distance
        sum_d sin(pi * (x_d - x'_d) / T)^2 / ls_d^2 and hands it to the base
        kernel's ``full_from_distance``.
        """
        X, Xs = self._slice(X, Xs)
        if Xs is None:
            Xs = X
        f1 = pt.expand_dims(X, axis=(0,))
        f2 = pt.expand_dims(Xs, axis=(1,))
        r = np.pi * (f1 - f2) / self.period
        r2 = pt.sum(pt.square(pt.sin(r) / self.cov_func.ls), 2)
        return self.cov_func.full_from_distance(r2, squared=True)

    def diag(self, X: TensorLike) -> TensorVariable:
        """Diagonal of the covariance matrix: stationary kernels give 1 at zero distance."""
        return self._alloc(1.0, X.shape[0])
885956
class Gibbs(Covariance):
886957
r"""
887958
The Gibbs kernel. Use an arbitrary lengthscale function defined

tests/gp/test_cov.py

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -468,6 +468,22 @@ def test_psd(self):
468468
)
469469
npt.assert_allclose(true_1d_psd, test_1d_psd, atol=1e-5)
470470

471+
def test_euclidean_dist(self):
472+
X = np.arange(0, 3)[:, None]
473+
Xs = np.arange(1, 4)[:, None]
474+
with pm.Model():
475+
cov = pm.gp.cov.ExpQuad(1, ls=1)
476+
result = cov.euclidean_dist(X, Xs).eval()
477+
expected = np.array(
478+
[
479+
[1, 2, 3],
480+
[0, 1, 2],
481+
[1, 0, 1],
482+
]
483+
)
484+
print(result, expected)
485+
npt.assert_allclose(result, expected, atol=1e-5)
486+
471487

472488
class TestWhiteNoise:
473489
def test_1d(self):
@@ -678,6 +694,28 @@ def test_raises(self):
678694
pm.gp.cov.WarpedInput(1, "str is not Covariance object", lambda x: x)
679695

680696

697+
class TestWrappedPeriodic:
    def test_1d(self):
        # Wrapping ExpQuad must reproduce the dedicated Periodic kernel,
        # both for the full matrix and for the diagonal.
        X = np.linspace(0, 1, 10)[:, None]
        with pm.Model():
            periodic = pm.gp.cov.Periodic(1, ls=0.2, period=1)
            wrapped = pm.gp.cov.WrappedPeriodic(
                cov_func=pm.gp.cov.ExpQuad(1, ls=0.2),
                period=1,
            )
            K_periodic = periodic(X).eval()
            K_wrapped = wrapped(X).eval()
            npt.assert_allclose(K_periodic, K_wrapped, atol=1e-3)
            K_periodic_diag = periodic(X, diag=True).eval()
            K_wrapped_diag = wrapped(X, diag=True).eval()
            npt.assert_allclose(K_periodic_diag, K_wrapped_diag, atol=1e-3)

    def test_raises(self):
        # Non-stationary base kernels are rejected with a TypeError.
        with pytest.raises(TypeError, match="Must inherit from the Stationary class"):
            pm.gp.cov.WrappedPeriodic(pm.gp.cov.Linear(1, c=1), period=1)
718+
681719
class TestGibbs:
682720
def test_1d(self):
683721
X = np.linspace(0, 2, 10)[:, None]

0 commit comments

Comments
 (0)