@@ -14,8 +14,7 @@ using LinearAlgebra
1414
1515# ===================================================================
1616# # EXPORTS
17- export LinearRegressor, RidgeRegressor, PCA, KernelPCA, ICA, PPCA, FactorAnalysis, LDA,
18- BayesianLDA, SubspaceLDA, BayesianSubspaceLDA
17+ # Models are exported automatically by `@mlj_model` macro
1918
2019# ===================================================================
2120# # Re-EXPORTS
@@ -34,70 +33,84 @@ const FactorAnalysisResultType = MS.FactorAnalysis
3433const default_kernel = (x, y) -> x' y # default kernel used in KernelPCA
3534
3635# Definitions of model descriptions for use in model doc-strings.
37- const PCA_DESCR = """ Principal component analysis. Learns a linear transformation to
38- project the data on a lower dimensional space while preserving most of the initial
39- variance.
40- """
36+ const PCA_DESCR = """
37+ Principal component analysis. Learns a linear transformation to
38+ project the data on a lower dimensional space while preserving most of the initial
39+ variance.
40+ """
4141const KPCA_DESCR = " Kernel principal component analysis."
4242const ICA_DESCR = " Independent component analysis."
4343const PPCA_DESCR = " Probabilistic principal component analysis"
4444const FactorAnalysis_DESCR = " Factor Analysis"
45- const LDA_DESCR = """ Multiclass linear discriminant analysis. The algorithm learns a
46- projection matrix `P` that projects a feature matrix `Xtrain` onto a lower dimensional
47- space of dimension `out_dim` such that the trace of the transformed between-class scatter
48- matrix(`Pᵀ*Sb*P`) is maximized relative to the trace of the transformed within-class
49- scatter matrix (`Pᵀ*Sw*P`).The projection matrix is scaled such that `Pᵀ*Sw*P=I` or
50- `Pᵀ*Σw*P=I`(where `Σw` is the within-class covariance matrix) .
51- Predicted class posterior probability for feature matrix `Xtest` are derived by applying
52- a softmax transformationto a matrix `Pr`, such that rowᵢ of `Pr` contains computed
53- distances(based on a distance metric) in the transformed space of rowᵢ in `Xtest` to the
54- centroid of each class.
55- """
56- const BayesianLDA_DESCR = """ Bayesian Multiclass linear discriminant analysis. The algorithm
57- learns a projection matrix `P` that projects a feature matrix `Xtrain` onto a lower
58- dimensional space of dimension `out_dim` such that the trace of the transformed
59- between-class scatter matrix(`Pᵀ*Sb*P`) is maximized relative to the trace of the
60- transformed within-class scatter matrix (`Pᵀ*Sw*P`). The projection matrix is scaled such
61- that `Pᵀ*Sw*P = n` or `Pᵀ*Σw*P=I` (Where `n` is the number of training samples and `Σw`
62- is the within-class covariance matrix).
63- Predicted class posterior probability distibution are derived by applying Bayes rule with
64- a multivariate Gaussian class-conditional distribution.
65- """
66- const SubspaceLDA_DESCR = """ Multiclass linear discriminant analysis. Suitable for high
67- dimensional data (Avoids computing scatter matrices `Sw` ,`Sb`). The algorithm learns a
68- projection matrix `P = W*L` that projects a feature matrix `Xtrain` onto a lower
69- dimensional space of dimension `nc - 1` such that the trace of the transformed
70- between-class scatter matrix(`Pᵀ*Sb*P`) is maximized relative to the trace of the
71- transformed within-class scatter matrix (`Pᵀ*Sw*P`). The projection matrix is scaled such
72- that `Pᵀ*Sw*P = mult*I` or `Pᵀ*Σw*P=mult/(n-nc)*I` (where `n` is the number of training
73- samples, mult` is one of `n` or `1` depending on whether `Sb` is normalized, `Σw` is the
74- within-class covariance matrix, and `nc` is the number of unique classes in `y`) and also
75- obeys `Wᵀ*Sb*p = λ*Wᵀ*Sw*p`, for every column `p` in `P`.
76- Predicted class posterior probability for feature matrix `Xtest` are derived by applying a
77- softmax transformation to a matrix `Pr`, such that rowᵢ of `Pr` contains computed
78- distances(based on a distance metric) in the transformed space of rowᵢ in `Xtest` to the
79- centroid of each class.
80- """
81- const BayesianSubspaceLDA_DESCR = """ Bayesian Multiclass linear discriminant analysis.
82- Suitable for high dimensional data (Avoids computing scatter matrices `Sw` ,`Sb`). The
83- algorithm learns a projection matrix `P = W*L` (`Sw`), that projects a feature matrix
84- `Xtrain` onto a lower dimensional space of dimension `nc-1` such that the trace of the
85- transformed between-class scatter matrix(`Pᵀ*Sb*P`) is maximized relative to the trace
86- of the transformed within-class scatter matrix (`Pᵀ*Sw*P`). The projection matrix is
87- scaled such that `Pᵀ*Sw*P = mult*I` or `Pᵀ*Σw*P=mult/(n-nc)*I` (where `n` is the number of
88- training samples, `mult` is one of `n` or `1` depending on whether `Sb` is normalized,
89- `Σw` is the within-class covariance matrix, and `nc` is the number of unique classes in
90- `y`) and also obeys `Wᵀ*Sb*p = λ*Wᵀ*Sw*p`, for every column `p` in `P`.
91- Posterior class probability distibution are derived by applying Bayes rule with a
92- multivariate Gaussian class-conditional distribution
93- """
94- const LINEAR_DESCR = """ Linear regression. Learns a linear combination(s) of given
95- variables to fit the responses by minimizing the squared error between.
96- """
97- const RIDGE_DESCR = """ Ridge regressor with regularization parameter lambda. Learns a
98- linear regression with a penalty on the l2 norm of the coefficients.
99- """
100-
45+ const LDA_DESCR = """
46+ Multiclass linear discriminant analysis. The algorithm learns a
47+ projection matrix `P` that projects a feature matrix `Xtrain` onto a lower dimensional
48+ space of dimension `out_dim` such that the trace of the transformed between-class
49+ scatter matrix (`Pᵀ*Sb*P`) is maximized relative to the trace of the transformed
50+ within-class scatter matrix (`Pᵀ*Sw*P`). The projection matrix is scaled such that
51+ `Pᵀ*Sw*P=I` or `Pᵀ*Σw*P=I` (where `Σw` is the within-class covariance matrix).
52+ Predicted class posterior probability for feature matrix `Xtest` are derived by
53+ applying a softmax transformation to a matrix `Pr`, such that rowᵢ of `Pr` contains
54+ computed distances (based on a distance metric) in the transformed space of rowᵢ in
55+ `Xtest` to the centroid of each class.
56+ """
57+ const BayesianLDA_DESCR = """
58+ Bayesian Multiclass linear discriminant analysis. The algorithm
59+ learns a projection matrix `P` that projects a feature matrix `Xtrain` onto a lower
60+ dimensional space of dimension `out_dim` such that the trace of the transformed
61+ between-class scatter matrix (`Pᵀ*Sb*P`) is maximized relative to the trace of the
62+ transformed within-class scatter matrix (`Pᵀ*Sw*P`). The projection matrix is scaled
63+ such that `Pᵀ*Sw*P = n` or `Pᵀ*Σw*P=I` (where `n` is the number of training samples
64+ and `Σw` is the within-class covariance matrix).
65+ Predicted class posterior probability distributions are derived by applying Bayes rule
66+ with a multivariate Gaussian class-conditional distribution.
67+ """
68+ const SubspaceLDA_DESCR = """
69+ Multiclass linear discriminant analysis. Suitable for high
70+ dimensional data (avoids computing scatter matrices `Sw`, `Sb`). The algorithm learns a
71+ projection matrix `P = W*L` that projects a feature matrix `Xtrain` onto a lower
72+ dimensional space of dimension `nc - 1` such that the trace of the transformed
73+ between-class scatter matrix (`Pᵀ*Sb*P`) is maximized relative to the trace of the
74+ transformed within-class scatter matrix (`Pᵀ*Sw*P`). The projection matrix is scaled
75+ such that `Pᵀ*Sw*P = mult*I` or `Pᵀ*Σw*P=mult/(n-nc)*I` (where `n` is the number of
76+ training samples, `mult` is one of `n` or `1` depending on whether `Sb` is normalized,
77+ `Σw` is the within-class covariance matrix, and `nc` is the number of unique classes
78+ in `y`) and also obeys `Wᵀ*Sb*p = λ*Wᵀ*Sw*p`, for every column `p` in `P`.
79+ Predicted class posterior probability for feature matrix `Xtest` are derived by
80+ applying a softmax transformation to a matrix `Pr`, such that rowᵢ of `Pr` contains
81+ computed distances (based on a distance metric) in the transformed space of rowᵢ in
82+ `Xtest` to the centroid of each class.
83+ """
84+ const BayesianSubspaceLDA_DESCR = """
85+ Bayesian Multiclass linear discriminant analysis. Suitable for high dimensional data
86+ (avoids computing scatter matrices `Sw`, `Sb`). The algorithm learns a projection
87+ matrix `P = W*L` (`Sw`), that projects a feature matrix `Xtrain` onto a lower
88+ dimensional space of dimension `nc-1` such that the trace of the transformed
89+ between-class scatter matrix (`Pᵀ*Sb*P`) is maximized relative to the trace of the
90+ transformed within-class scatter matrix (`Pᵀ*Sw*P`). The projection matrix is scaled
91+ such that `Pᵀ*Sw*P = mult*I` or `Pᵀ*Σw*P=mult/(n-nc)*I` (where `n` is the number of
92+ training samples, `mult` is one of `n` or `1` depending on whether `Sb` is normalized,
93+ `Σw` is the within-class covariance matrix, and `nc` is the number of unique classes in
94+ `y`) and also obeys `Wᵀ*Sb*p = λ*Wᵀ*Sw*p`, for every column `p` in `P`.
95+ Posterior class probability distributions are derived by applying Bayes rule with a
96+ multivariate Gaussian class-conditional distribution.
97+ """
98+ const LinearRegressor_DESCR = """
99+ Linear Regression. Learns a linear combination of given
100+ variables to fit the response by minimizing the squared error between the
101+ predictions and the observed responses.
101+ """
102+ const MultitargetLinearRegressor_DESCR = """
103+ Multitarget Linear Regression. Learns linear combinations of given
104+ variables to fit the responses by minimizing the squared error between the
105+ predictions and the observed responses.
105+ """
106+ const RidgeRegressor_DESCR = """
107+ Ridge regressor with regularization parameter lambda. Learns a
108+ linear regression with a penalty on the l2 norm of the coefficients.
109+ """
110+ const MultitargetRidgeRegressor_DESCR = """
111+ Multitarget Ridge regressor with regularization parameter lambda. Learns a
112+ Multitarget linear regression with a penalty on the l2 norm of the coefficients.
113+ """
101114const PKG = " MLJMultivariateStatsInterface"
102115
103116# ===================================================================
0 commit comments