@@ -41,6 +41,7 @@
    "Cosine",
    "Periodic",
    "WarpedInput",
+    "WrappedPeriodic",
    "Gibbs",
    "Coregion",
    "ScaledCov",
@@ -551,6 +552,11 @@ def diag(self, X: TensorLike) -> TensorVariable:
        return self._alloc(1.0, X.shape[0])

    def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
+        X, Xs = self._slice(X, Xs)
+        r2 = self.square_dist(X, Xs)
+        return self.full_from_distance(r2, squared=True)
+
+    def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
        raise NotImplementedError

    def power_spectral_density(self, omega: TensorLike) -> TensorVariable:
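With this hook in place, a custom stationary kernel only has to map a (squared) distance to a covariance value; the base `Stationary.full` now handles slicing and the distance computation. A minimal sketch under that assumption (`MyKernel` and its exponential form are illustrative, not part of this change):

    import pymc as pm
    import pytensor.tensor as pt

    class MyKernel(pm.gp.cov.Stationary):
        # Only the distance -> covariance map is defined here;
        # Stationary.full() computes the squared distance and calls this method.
        def full_from_distance(self, dist, squared=False):
            r2 = dist if squared else dist**2
            return pt.exp(-0.5 * r2)  # same functional form as ExpQuad, purely illustrative

    cov = MyKernel(input_dim=1, ls=0.5)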
@@ -568,9 +574,8 @@ class ExpQuad(Stationary):

    """

-    def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
-        X, Xs = self._slice(X, Xs)
-        r2 = self.square_dist(X, Xs)
+    def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
+        r2 = dist if squared else dist**2
        return pt.exp(-0.5 * r2)

    def power_spectral_density(self, omega: TensorLike) -> TensorVariable:
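Since `ExpQuad.full` is now inherited from `Stationary` and simply feeds the squared distance into `full_from_distance`, the two call paths should produce the same matrix. A quick equivalence sketch, assuming a working PyMC installation (input values chosen arbitrarily):

    import numpy as np
    import pymc as pm

    X = np.linspace(0, 1, 5)[:, None]
    cov = pm.gp.cov.ExpQuad(1, ls=0.5)

    K_full = cov.full(X).eval()  # slicing + square_dist + full_from_distance
    K_dist = cov.full_from_distance(cov.square_dist(X, None), squared=True).eval()
    assert np.allclose(K_full, K_dist)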
@@ -609,9 +614,8 @@ def __init__(
        super().__init__(input_dim, ls, ls_inv, active_dims)
        self.alpha = alpha

-    def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
-        X, Xs = self._slice(X, Xs)
-        r2 = self.square_dist(X, Xs)
+    def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
+        r2 = dist if squared else dist**2
        return pt.power(
            (1.0 + 0.5 * r2 * (1.0 / self.alpha)),
            -1.0 * self.alpha,
@@ -629,9 +633,8 @@ class Matern52(Stationary):
           \mathrm{exp}\left[ - \frac{\sqrt{5(x - x')^2}}{\ell} \right]
    """

-    def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
-        X, Xs = self._slice(X, Xs)
-        r = self.euclidean_dist(X, Xs)
+    def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
+        r = self._sqrt(dist) if squared else dist
        return (1.0 + np.sqrt(5.0) * r + 5.0 / 3.0 * pt.square(r)) * pt.exp(-1.0 * np.sqrt(5.0) * r)

    def power_spectral_density(self, omega: TensorLike) -> TensorVariable:
@@ -669,9 +672,8 @@ class Matern32(Stationary):
           \mathrm{exp}\left[ - \frac{\sqrt{3(x - x')^2}}{\ell} \right]
    """

-    def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
-        X, Xs = self._slice(X, Xs)
-        r = self.euclidean_dist(X, Xs)
+    def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
+        r = self._sqrt(dist) if squared else dist
        return (1.0 + np.sqrt(3.0) * r) * pt.exp(-np.sqrt(3.0) * r)

    def power_spectral_density(self, omega: TensorLike) -> TensorVariable:
@@ -708,9 +710,8 @@ class Matern12(Stationary):
       k(x, x') = \mathrm{exp}\left[ -\frac{(x - x')^2}{\ell} \right]
    """

-    def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
-        X, Xs = self._slice(X, Xs)
-        r = self.euclidean_dist(X, Xs)
+    def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
+        r = self._sqrt(dist) if squared else dist
        return pt.exp(-r)

@@ -723,9 +724,8 @@ class Exponential(Stationary):
       k(x, x') = \mathrm{exp}\left[ -\frac{||x - x'||}{2\ell} \right]
    """

-    def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
-        X, Xs = self._slice(X, Xs)
-        r = self.euclidean_dist(X, Xs)
+    def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
+        r = self._sqrt(dist) if squared else dist
        return pt.exp(-0.5 * r)

@@ -737,9 +737,8 @@ class Cosine(Stationary):
       k(x, x') = \mathrm{cos}\left( 2 \pi \frac{||x - x'||}{ \ell^2} \right)
    """

-    def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
-        X, Xs = self._slice(X, Xs)
-        r = self.euclidean_dist(X, Xs)
+    def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
+        r = self._sqrt(dist) if squared else dist
        return pt.cos(2.0 * np.pi * r)

@@ -781,6 +780,12 @@ def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable
        f2 = pt.expand_dims(Xs, axis=(1,))
        r = np.pi * (f1 - f2) / self.period
        r2 = pt.sum(pt.square(pt.sin(r) / self.ls), 2)
+        return self.full_from_distance(r2, squared=True)
+
+    def full_from_distance(self, dist: TensorLike, squared: bool = False) -> TensorVariable:
+        # NOTE: This is the same as the ExpQuad as we assume the periodicity
+        # has already been accounted for in the distance
+        r2 = dist if squared else dist**2
        return pt.exp(-0.5 * r2)

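Read off from the code above, the Periodic covariance therefore evaluates to

    k(x, x') = \mathrm{exp}\left[ -\frac{1}{2} \sum_{i} \frac{\mathrm{sin}^2\left( \pi (x_i - x'_i) / T \right)}{\ell_i^2} \right]

with T the period; `full_from_distance` only applies the final exponential because the sine-warped distance `r2` already accounts for the periodicity, as the NOTE comment states.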
@@ -882,6 +887,72 @@ def diag(self, X: TensorLike) -> TensorVariable:
        return self.cov_func(self.w(X, self.args), diag=True)


+class WrappedPeriodic(Covariance):
+    r"""
+    The `WrappedPeriodic` kernel constructs periodic kernels from any `Stationary` kernel.
+
+    This is done by warping the input with the function
+
+    .. math::
+        \mathbf{u}(x) = \left(
+            \mathrm{sin} \left( \frac{2\pi x}{T} \right),
+            \mathrm{cos} \left( \frac{2\pi x}{T} \right)
+        \right)
+
+    The original derivation as applied to the squared exponential kernel can
+    be found in [1] or referenced in Chapter 4, page 92 of [2].
+
+    Notes
+    -----
+    Note that we drop the resulting scaling by 4 found in the original derivation
+    so that the interpretation of the length scales is consistent between the
+    underlying kernel and the periodic version.
+
+    Examples
+    --------
+    In order to construct a kernel equivalent to the `Periodic` kernel you
+    can do the following (though using `Periodic` will likely be a bit faster):
+
+    .. code:: python
+
+        exp_quad = pm.gp.cov.ExpQuad(1, ls=0.5)
+        cov = pm.gp.cov.WrappedPeriodic(exp_quad, period=5)
+
+    References
+    ----------
+    .. [1] MacKay, D. J. C. (1998). Introduction to Gaussian Processes.
+       In Bishop, C. M., editor, Neural Networks and Machine Learning. Springer-Verlag
+    .. [2] Rasmussen, C. E., & Williams, C. K. I. (2006). Gaussian processes for machine learning. MIT Press.
+       http://gaussianprocess.org/gpml/chapters/
+
+    Parameters
+    ----------
+    cov_func: Stationary
+        Base kernel or covariance function
+    period: Period
+    """
+
+    def __init__(self, cov_func: Stationary, period):
+        super().__init__(cov_func.input_dim, cov_func.active_dims)
+        if not isinstance(cov_func, Stationary):
+            raise TypeError("Must inherit from the Stationary class")
+        self.cov_func = cov_func
+        self.period = period
+
+    def full(self, X: TensorLike, Xs: Optional[TensorLike] = None) -> TensorVariable:
+        X, Xs = self._slice(X, Xs)
+        if Xs is None:
+            Xs = X
+        f1 = pt.expand_dims(X, axis=(0,))
+        f2 = pt.expand_dims(Xs, axis=(1,))
+        r = np.pi * (f1 - f2) / self.period
+        r2 = pt.sum(pt.square(pt.sin(r) / self.cov_func.ls), 2)
+        return self.cov_func.full_from_distance(r2, squared=True)
+
+    def diag(self, X: TensorLike) -> TensorVariable:
+        return self._alloc(1.0, X.shape[0])
+
+
class Gibbs(Covariance):
    r"""
    The Gibbs kernel. Use an arbitrary lengthscale function defined
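The sine-based distance in `WrappedPeriodic.full` follows directly from the warping in its docstring: for a single input dimension,

    \lVert \mathbf{u}(x) - \mathbf{u}(x') \rVert^2 = 2 - 2\cos\left( \frac{2\pi (x - x')}{T} \right) = 4\,\mathrm{sin}^2\left( \frac{\pi (x - x')}{T} \right)

and dropping the factor of 4 (see the Notes section) leaves the sin^2 term that the code scales by the lengthscale, sums over the active dimensions, and hands to the base kernel's `full_from_distance`. Because every `Stationary` kernel now exposes that method, the wrapper is not limited to `ExpQuad`; a short usage sketch, assuming a working PyMC installation (lengthscale and period values are arbitrary):

    import pymc as pm

    # A periodic version of a Matern 5/2 kernel via the new wrapper
    base = pm.gp.cov.Matern52(1, ls=0.3)
    cov = pm.gp.cov.WrappedPeriodic(base, period=2.0)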