@@ -34,70 +34,84 @@ const FactorAnalysisResultType = MS.FactorAnalysis
 const default_kernel = (x, y) -> x' * y # default kernel used in KernelPCA
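# Quick illustrative check (not from the diff itself): the default kernel above
# is a plain linear kernel, i.e. the dot product of its arguments. Vector values
# here are arbitrary assumptions for the example.
using LinearAlgebra
let x = [1.0, 2.0], y = [3.0, 4.0]
    @assert default_kernel(x, y) == dot(x, y)  # both give 11.0
end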
 
 # Definitions of model descriptions for use in model doc-strings.
-const PCA_DESCR = """ Principal component analysis. Learns a linear transformation to
-    project the data on a lower dimensional space while preserving most of the initial
-    variance.
-    """
+const PCA_DESCR = """
+    Principal component analysis. Learns a linear transformation to project the data
+    onto a lower-dimensional space while preserving most of the initial variance.
+    """
 const KPCA_DESCR = "Kernel principal component analysis."
 const ICA_DESCR = "Independent component analysis."
 const PPCA_DESCR = "Probabilistic principal component analysis."
 const FactorAnalysis_DESCR = "Factor analysis."
-const LDA_DESCR = """ Multiclass linear discriminant analysis. The algorithm learns a
-    projection matrix `P` that projects a feature matrix `Xtrain` onto a lower dimensional
-    space of dimension `out_dim` such that the trace of the transformed between-class scatter
-    matrix(`Pᵀ*Sb*P`) is maximized relative to the trace of the transformed within-class
-    scatter matrix (`Pᵀ*Sw*P`).The projection matrix is scaled such that `Pᵀ*Sw*P=I` or
-    `Pᵀ*Σw*P=I`(where `Σw` is the within-class covariance matrix) .
-    Predicted class posterior probability for feature matrix `Xtest` are derived by applying
-    a softmax transformationto a matrix `Pr`, such that rowᵢ of `Pr` contains computed
-    distances(based on a distance metric) in the transformed space of rowᵢ in `Xtest` to the
-    centroid of each class.
-    """
-const BayesianLDA_DESCR = """ Bayesian Multiclass linear discriminant analysis. The algorithm
-    learns a projection matrix `P` that projects a feature matrix `Xtrain` onto a lower
-    dimensional space of dimension `out_dim` such that the trace of the transformed
-    between-class scatter matrix(`Pᵀ*Sb*P`) is maximized relative to the trace of the
-    transformed within-class scatter matrix (`Pᵀ*Sw*P`). The projection matrix is scaled such
-    that `Pᵀ*Sw*P = n` or `Pᵀ*Σw*P=I` (Where `n` is the number of training samples and `Σw`
-    is the within-class covariance matrix).
-    Predicted class posterior probability distibution are derived by applying Bayes rule with
-    a multivariate Gaussian class-conditional distribution.
-    """
-const SubspaceLDA_DESCR = """ Multiclass linear discriminant analysis. Suitable for high
-    dimensional data (Avoids computing scatter matrices `Sw` ,`Sb`). The algorithm learns a
-    projection matrix `P = W*L` that projects a feature matrix `Xtrain` onto a lower
-    dimensional space of dimension `nc - 1` such that the trace of the transformed
-    between-class scatter matrix(`Pᵀ*Sb*P`) is maximized relative to the trace of the
-    transformed within-class scatter matrix (`Pᵀ*Sw*P`). The projection matrix is scaled such
-    that `Pᵀ*Sw*P = mult*I` or `Pᵀ*Σw*P=mult/(n-nc)*I` (where `n` is the number of training
-    samples, mult` is one of `n` or `1` depending on whether `Sb` is normalized, `Σw` is the
-    within-class covariance matrix, and `nc` is the number of unique classes in `y`) and also
-    obeys `Wᵀ*Sb*p = λ*Wᵀ*Sw*p`, for every column `p` in `P`.
-    Predicted class posterior probability for feature matrix `Xtest` are derived by applying a
-    softmax transformation to a matrix `Pr`, such that rowᵢ of `Pr` contains computed
-    distances(based on a distance metric) in the transformed space of rowᵢ in `Xtest` to the
-    centroid of each class.
-    """
-const BayesianSubspaceLDA_DESCR = """ Bayesian Multiclass linear discriminant analysis.
-    Suitable for high dimensional data (Avoids computing scatter matrices `Sw` ,`Sb`). The
-    algorithm learns a projection matrix `P = W*L` (`Sw`), that projects a feature matrix
-    `Xtrain` onto a lower dimensional space of dimension `nc-1` such that the trace of the
-    transformed between-class scatter matrix(`Pᵀ*Sb*P`) is maximized relative to the trace
-    of the transformed within-class scatter matrix (`Pᵀ*Sw*P`). The projection matrix is
-    scaled such that `Pᵀ*Sw*P = mult*I` or `Pᵀ*Σw*P=mult/(n-nc)*I` (where `n` is the number of
-    training samples, `mult` is one of `n` or `1` depending on whether `Sb` is normalized,
-    `Σw` is the within-class covariance matrix, and `nc` is the number of unique classes in
-    `y`) and also obeys `Wᵀ*Sb*p = λ*Wᵀ*Sw*p`, for every column `p` in `P`.
-    Posterior class probability distibution are derived by applying Bayes rule with a
-    multivariate Gaussian class-conditional distribution
-    """
-const LINEAR_DESCR = """ Linear regression. Learns a linear combination(s) of given
-    variables to fit the responses by minimizing the squared error between.
-    """
-const RIDGE_DESCR = """ Ridge regressor with regularization parameter lambda. Learns a
-    linear regression with a penalty on the l2 norm of the coefficients.
-    """
-
+const LDA_DESCR = """
+    Multiclass linear discriminant analysis. The algorithm learns a projection matrix
+    `P` that projects a feature matrix `Xtrain` onto a lower-dimensional space of
+    dimension `out_dim`, such that the trace of the transformed between-class scatter
+    matrix (`Pᵀ*Sb*P`) is maximized relative to the trace of the transformed
+    within-class scatter matrix (`Pᵀ*Sw*P`). The projection matrix is scaled such that
+    `Pᵀ*Sw*P = I` or `Pᵀ*Σw*P = I` (where `Σw` is the within-class covariance matrix).
+    Predicted class posterior probabilities for a feature matrix `Xtest` are derived by
+    applying a softmax transformation to a matrix `Pr`, such that rowᵢ of `Pr` contains
+    the computed distances (based on a distance metric) in the transformed space between
+    rowᵢ of `Xtest` and the centroid of each class.
+    """
+const BayesianLDA_DESCR = """
+    Bayesian multiclass linear discriminant analysis. The algorithm learns a projection
+    matrix `P` that projects a feature matrix `Xtrain` onto a lower-dimensional space of
+    dimension `out_dim`, such that the trace of the transformed between-class scatter
+    matrix (`Pᵀ*Sb*P`) is maximized relative to the trace of the transformed
+    within-class scatter matrix (`Pᵀ*Sw*P`). The projection matrix is scaled such that
+    `Pᵀ*Sw*P = n` or `Pᵀ*Σw*P = I` (where `n` is the number of training samples and
+    `Σw` is the within-class covariance matrix).
+    Predicted class posterior probability distributions are derived by applying Bayes'
+    rule with a multivariate Gaussian class-conditional distribution.
+    """
+const SubspaceLDA_DESCR = """
+    Multiclass linear discriminant analysis, suitable for high-dimensional data (it
+    avoids computing the scatter matrices `Sw` and `Sb`). The algorithm learns a
+    projection matrix `P = W*L` that projects a feature matrix `Xtrain` onto a
+    lower-dimensional space of dimension `nc - 1`, such that the trace of the
+    transformed between-class scatter matrix (`Pᵀ*Sb*P`) is maximized relative to the
+    trace of the transformed within-class scatter matrix (`Pᵀ*Sw*P`). The projection
+    matrix is scaled such that `Pᵀ*Sw*P = mult*I` or `Pᵀ*Σw*P = mult/(n-nc)*I` (where
+    `n` is the number of training samples, `mult` is one of `n` or `1` depending on
+    whether `Sb` is normalized, `Σw` is the within-class covariance matrix, and `nc` is
+    the number of unique classes in `y`), and also obeys `Wᵀ*Sb*p = λ*Wᵀ*Sw*p` for
+    every column `p` of `P`.
+    Predicted class posterior probabilities for a feature matrix `Xtest` are derived by
+    applying a softmax transformation to a matrix `Pr`, such that rowᵢ of `Pr` contains
+    the computed distances (based on a distance metric) in the transformed space between
+    rowᵢ of `Xtest` and the centroid of each class.
+    """
+const BayesianSubspaceLDA_DESCR = """
+    Bayesian multiclass linear discriminant analysis, suitable for high-dimensional
+    data (it avoids computing the scatter matrices `Sw` and `Sb`). The algorithm learns
+    a projection matrix `P = W*L` that projects a feature matrix `Xtrain` onto a
+    lower-dimensional space of dimension `nc - 1`, such that the trace of the
+    transformed between-class scatter matrix (`Pᵀ*Sb*P`) is maximized relative to the
+    trace of the transformed within-class scatter matrix (`Pᵀ*Sw*P`). The projection
+    matrix is scaled such that `Pᵀ*Sw*P = mult*I` or `Pᵀ*Σw*P = mult/(n-nc)*I` (where
+    `n` is the number of training samples, `mult` is one of `n` or `1` depending on
+    whether `Sb` is normalized, `Σw` is the within-class covariance matrix, and `nc` is
+    the number of unique classes in `y`), and also obeys `Wᵀ*Sb*p = λ*Wᵀ*Sw*p` for
+    every column `p` of `P`.
+    Posterior class probability distributions are derived by applying Bayes' rule with
+    a multivariate Gaussian class-conditional distribution.
+    """
+const LinearRegressor_DESCR = """
+    Linear regression. Learns a linear combination of the given variables to fit the
+    response by minimizing the squared error between the observed and predicted values.
+    """
+const MultitargetLinearRegressor_DESCR = """
+    Multitarget linear regression. Learns linear combinations of the given variables to
+    fit the responses by minimizing the squared error between the observed and
+    predicted values.
+    """
+const RidgeRegressor_DESCR = """
+    Ridge regressor with regularization parameter `lambda`. Learns a linear regression
+    with a penalty on the l2 norm of the coefficients.
+    """
+const MultitargetRidgeRegressor_DESCR = """
+    Multitarget ridge regressor with regularization parameter `lambda`. Learns a
+    multitarget linear regression with a penalty on the l2 norm of the coefficients.
+    """
 const PKG = "MLJMultivariateStatsInterface"
 
 # ===================================================================