NeuralEngine
A Game Engine with embedded Machine Learning algorithms based on Gaussian Processes.
NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar > Class Template Reference

Likelihood estimation based on Probit distribution. More...

#include <FgProbitLikelihoodLayer.h>

Inheritance diagram for NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >:
Collaboration diagram for NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >:

Public Member Functions

 ProbitLikLayer (int numPoints, int outputDim)
 Constructor. More...
 
Scalar ComputeLogZ (const af::array &mout, const af::array &vout, const af::array &y, Scalar alpha=1.0, af::array *dlogZ_dm=nullptr, af::array *dlogZ_dv=nullptr, af::array *dlogZ_dm2=nullptr)
 Calculates the logZ. More...
 
void ComputeLogZGradients (const af::array &mout, const af::array &vout, const af::array &y, af::array *dlogZ_dm=nullptr, af::array *dlogZ_dv=nullptr, af::array *dlogZ_dm2=nullptr, Scalar alpha=1.0)
 Calculates the logZ gradients. More...
 
Scalar ComputeLogLikExp (const af::array &mout, const af::array &vout, const af::array &y)
 
void ComputeLogLikExpGradients (const af::array &mout, const af::array &vout, const af::array &y, af::array *de_dm=nullptr, af::array *de_dv=nullptr)
 
Scalar BackpropagationGradientsLogLikExp (const af::array &mout, const af::array &vout, af::array &dmout, af::array &dvout, af::array &y, Scalar scale=1.0)
 
Scalar BackpropagationGradients (const af::array &mout, const af::array &vout, af::array &dmout, af::array &dvout, Scalar alpha=1.0, Scalar scale=1.0)
 
void ProbabilisticOutput (const af::array &mf, const af::array &vf, af::array &myOut, af::array &vyOut, Scalar alpha=1.0f)
 
- Public Member Functions inherited from NeuralEngine::MachineLearning::GPModels::LikelihoodBaseLayer< Scalar >
 LikelihoodBaseLayer (LogLikType type, int numPoints, int outputDim)
 Constructor. More...
 
virtual Scalar ComputeLogZ (const af::array &mout, const af::array &vout, const af::array &y, Scalar alpha=1.0, af::array *dlogZ_dm=nullptr, af::array *dlogZ_dv=nullptr, af::array *dlogZ_dm2=nullptr)=0
 Calculates the logZ. More...
 
virtual void ComputeLogZGradients (const af::array &mout, const af::array &vout, const af::array &y, af::array *dlogZ_dm=nullptr, af::array *dlogZ_dv=nullptr, af::array *dlogZ_dm2=nullptr, Scalar alpha=1.0)=0
 Calculates logZ gradients. More...
 
virtual Scalar BackpropagationGradients (const af::array &mout, const af::array &vout, af::array &dmout, af::array &dvout, Scalar alpha=1.0, Scalar scale=1.0)=0
 
virtual Scalar ComputeLogLikExp (const af::array &mout, const af::array &vout, const af::array &y)=0
 
virtual void ComputeLogLikExpGradients (const af::array &mout, const af::array &vout, const af::array &y, af::array *de_dm=nullptr, af::array *de_dv=nullptr)=0
 
virtual Scalar BackpropagationGradientsLogLikExp (const af::array &mout, const af::array &vout, af::array &dmout, af::array &dvout, af::array &y, Scalar scale=1.0)=0
 
virtual void ProbabilisticOutput (const af::array &mf, const af::array &vf, af::array &myOut, af::array &vyOut, Scalar alpha=1.0f)=0
 
virtual Scalar InitParameters ()
 
LogLikType GetLogLikType ()
 Gets log likelihood type. More...
 
virtual int GetNumParameters ()
 Gets number of parameters to be optimized. More...
 
virtual void SetParameters (const af::array &param)
 Sets the parameters for each optimization iteration. More...
 
virtual af::array GetParameters ()
 Gets the parameters for each optimization iteration. More...
 
virtual void FixParameters (bool isfixed)
 Set to fix the parameters or not for optimization. More...
 
virtual void UpdateParameters ()
 Updates the parameters. More...
 
- Public Member Functions inherited from NeuralEngine::MachineLearning::ILayer< Scalar >
 ILayer (LayerType type, int numPoints, int outputDim)
 Constructor. More...
 
virtual ~ILayer ()=default
 Destructor. More...
 
LayerType GetType ()
 Gets the layer type. More...
 
virtual int GetNumParameters ()=0
 Gets number of parameters to be optimized. More...
 
virtual void SetParameters (const af::array &param)=0
 Sets the parameters for each optimization iteration. More...
 
virtual af::array GetParameters ()=0
 Gets the parameters for each optimization iteration. More...
 
virtual void UpdateParameters ()=0
 Updates the parameters. More...
 
virtual void SetDataSize (int length, int dimension)
 Sets data size. More...
 

Protected Attributes

af::array afGHx
 
af::array afGHw
 
- Protected Attributes inherited from NeuralEngine::MachineLearning::GPModels::LikelihoodBaseLayer< Scalar >
bool isFixedParam
 
bool bDimMod
 
- Protected Attributes inherited from NeuralEngine::MachineLearning::ILayer< Scalar >
int iD
 data dimension More...
 
int iN
 data size More...
 
LayerType lType
 likelihood or gp layer More...
 
af::dtype m_dType
 floating point precision flag for af::array More...
 

Private Member Functions

template<class Archive >
void serialize (Archive &ar, unsigned int version)
 

Friends

class boost::serialization::access
 

Additional Inherited Members

- Protected Member Functions inherited from NeuralEngine::MachineLearning::GPModels::LikelihoodBaseLayer< Scalar >
 LikelihoodBaseLayer ()
 Default constructor. More...
 
- Protected Member Functions inherited from NeuralEngine::MachineLearning::ILayer< Scalar >
 ILayer ()
 Default constructor. More...
 

Detailed Description

template<typename Scalar>
class NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >

Likelihood estimation based on Probit distribution.


Approximating an unnormalized distribution means to replace it by a much simpler parametric distribution. This is often needed for intractable conditionals or integrals. This can be done via EP, PowerEP or VFE. The algorithms are based on minimization of the Kullback-Leibler divergence and presuppose that the distribution Q is restricted to belong to a family of probability distributions that is closed under the product operation. This is the exponential family:

 Q(x ∣ θ) = exp⁡(η(θ) * T(x) − A(θ)),

where η(θ) are the natural parameters, T(x) the sufficient statistics and A(θ) the log-normalizer, also known as logZ. The exponential family facilitates the parameter updates in each iteration step, because it needs just the first and second derivatives of logZ.

For a Gaussian distribution N(x|μ, σ^2) = 1/sqrt(2πσ^2) exp{−1/(2σ^2)(x−μ)^2} the exponential family parameters are:

η(θ) = (μ / σ^2, −1/(2σ^2))^T,
T(x) = (x, x^2)^T,
A(θ) = 1/2 log(π/(−η_2)) − η_1^2/(4η_2).

Hmetal T, 04/05/2018.

Definition at line 50 of file FgProbitLikelihoodLayer.h.

Constructor & Destructor Documentation

◆ ProbitLikLayer() [1/2]

template<typename Scalar >
NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >::ProbitLikLayer ( int  numPoints,
int  outputDim 
)

Constructor.

Hmetal T, 23.04.2018.

Parameters
numPoints	Number of samples.
outputDim	Number of dimensions.

◆ ProbitLikLayer() [2/2]

template<typename Scalar >
NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >::ProbitLikLayer ( )
inlineprotected

Definition at line 105 of file FgProbitLikelihoodLayer.h.

Member Function Documentation

◆ ComputeLogZ()

template<typename Scalar >
Scalar NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >::ComputeLogZ ( const af::array &  mout,
const af::array &  vout,
const af::array &  y,
Scalar  alpha = 1.0,
af::array *  dlogZ_dm = nullptr,
af::array *  dlogZ_dv = nullptr,
af::array *  dlogZ_dm2 = nullptr 
)
virtual

Calculates the logZ.

Hmetal T, 05/05/2018.

Parameters
mout	[in,out] The mean function.
vout	The covariance function.
y	[in,out] Sample data vector.
alpha	(Optional) Weight between alpha- and KL-divergence.
Returns
The calculated log z coordinate.

Implements NeuralEngine::MachineLearning::GPModels::LikelihoodBaseLayer< Scalar >.

◆ ComputeLogZGradients()

template<typename Scalar >
void NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >::ComputeLogZGradients ( const af::array &  mout,
const af::array &  vout,
const af::array &  y,
af::array *  dlogZ_dm = nullptr,
af::array *  dlogZ_dv = nullptr,
af::array *  dlogZ_dm2 = nullptr,
Scalar  alpha = 1.0 
)
virtual

Calculates the logZ gradients.

Hmetal T, 05/05/2018.

Parameters
mout	[in,out] The mean.
vout	[in,out] The standard deviation.
y	[in,out] Sample data vector.
dlogZ_dm	[in,out] (Optional) If non-null, derivative of logZ w.r.t. mean.
dlogZ_dv	[in,out] (Optional) If non-null, derivative of logZ w.r.t. standard deviation.
dlogZ_dm2	[in,out] (Optional) If non-null, derivative of logZ w.r.t. mean^2.

Implements NeuralEngine::MachineLearning::GPModels::LikelihoodBaseLayer< Scalar >.

◆ ComputeLogLikExp()

template<typename Scalar >
Scalar NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >::ComputeLogLikExp ( const af::array &  mout,
const af::array &  vout,
const af::array &  y 
)
virtual

◆ ComputeLogLikExpGradients()

template<typename Scalar >
void NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >::ComputeLogLikExpGradients ( const af::array &  mout,
const af::array &  vout,
const af::array &  y,
af::array *  de_dm = nullptr,
af::array *  de_dv = nullptr 
)
virtual

◆ BackpropagationGradientsLogLikExp()

template<typename Scalar >
Scalar NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >::BackpropagationGradientsLogLikExp ( const af::array &  mout,
const af::array &  vout,
af::array &  dmout,
af::array &  dvout,
af::array &  y,
Scalar  scale = 1.0 
)
virtual

◆ BackpropagationGradients()

template<typename Scalar >
Scalar NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >::BackpropagationGradients ( const af::array &  mout,
const af::array &  vout,
af::array &  dmout,
af::array &  dvout,
Scalar  alpha = 1.0,
Scalar  scale = 1.0 
)
virtual

◆ ProbabilisticOutput()

template<typename Scalar >
void NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >::ProbabilisticOutput ( const af::array &  mf,
const af::array &  vf,
af::array &  myOut,
af::array &  vyOut,
Scalar  alpha = 1.0f 
)
virtual

◆ serialize()

template<typename Scalar >
template<class Archive >
void NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >::serialize ( Archive &  ar,
unsigned int  version 
)
inlineprivate

Definition at line 115 of file FgProbitLikelihoodLayer.h.

Friends And Related Function Documentation

◆ boost::serialization::access

template<typename Scalar >
friend class boost::serialization::access
friend

Definition at line 112 of file FgProbitLikelihoodLayer.h.

Member Data Documentation

◆ afGHx

template<typename Scalar >
af::array NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >::afGHx
protected

Definition at line 108 of file FgProbitLikelihoodLayer.h.

◆ afGHw

template<typename Scalar >
af::array NeuralEngine::MachineLearning::GPModels::ProbitLikLayer< Scalar >::afGHw
protected

Definition at line 109 of file FgProbitLikelihoodLayer.h.


The documentation for this class was generated from the following files: