
Commit 12f9ca2

replace out_dim -> outdim

1 parent 5718b7c
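In user-facing terms, the commit renames the output-dimension hyperparameter of LDA, BayesianLDA, SubspaceLDA and BayesianSubspaceLDA from `out_dim` to `outdim`. A minimal usage sketch of the new spelling (not part of the commit; it assumes the models are loaded through MLJ in the usual way):

    using MLJ
    LDA = @load LDA pkg=MultivariateStats   # assumed registry name and package
    model = LDA(outdim=2)                   # formerly LDA(out_dim=2); leaving 0 keeps the default min(p, nc - 1)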

1 file changed: 24 additions, 24 deletions

src/models/discriminant_analysis.jl

@@ -36,26 +36,26 @@ download?doi=10.1.1.89.7068&rep=rep1&type=pdf).
     method::Symbol = :gevd::(_ in (:gevd, :whiten))
     cov_w::CovarianceEstimator = MS.SimpleCovariance()
     cov_b::CovarianceEstimator = MS.SimpleCovariance()
-    out_dim::Int = 0::(_ ≥ 0)
+    outdim::Int = 0::(_ ≥ 0)
     regcoef::Float64 = 1e-6::(_ ≥ 0)
     dist::SemiMetric = SqEuclidean()
 end
 
 function MMI.fit(model::LDA, ::Int, X, y)
-    Xm_t, yplain, classes_seen, p, n, nc, nclasses, integers_seen, out_dim =
+    Xm_t, yplain, classes_seen, p, n, nc, nclasses, integers_seen, outdim =
         _check_lda_data(model, X, y)
     core_res = MS.fit(
         MS.MulticlassLDA, nc, Xm_t, Int.(yplain);
         method=model.method,
-        outdim=out_dim,
+        outdim,
         regcoef=model.regcoef,
         covestimator_within=model.cov_w,
         covestimator_between=model.cov_b
     )
     cache = nothing
     report = (
         classes=classes_seen,
-        out_dim=MS.size(core_res)[2],
+        outdim=MS.size(core_res)[2],
         projected_class_means=MS.classmeans(core_res),
         mean=MS.mean(core_res),
         class_weights=MS.classweights(core_res),
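A side note on the call-site change above, not part of the diff itself: dropping the explicit `outdim=out_dim` in favor of bare `outdim` relies on Julia's implicit keyword-argument shorthand (available since Julia 1.5), which works here because the local variable now shares the keyword's name. A generic illustration:

    f(; outdim) = outdim              # toy function with a keyword argument
    outdim = 2
    f(outdim=outdim) == f(; outdim)   # true: bare `outdim` expands to outdim=outdim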
@@ -102,17 +102,17 @@ function _check_lda_data(model, X, y)
     # Check output dimension default is min(p, nc-1)
     def_outdim = min(p, nc - 1)
     # If unset (0) use the default; otherwise try to use the provided one
-    out_dim = ifelse(model.out_dim == 0, def_outdim, model.out_dim)
+    outdim = ifelse(model.outdim == 0, def_outdim, model.outdim)
     # Check if the given one is sensible
-    if out_dim > p
+    if outdim > p
         throw(
             ArgumentError(
-                "`out_dim` must not be larger than `p`"*
+                "`outdim` must not be larger than `p`"*
                 "where `p` is the number of features in `X`"
             )
         )
     end
-    return Xm_t, yplain, classes_seen, p, n, nc, nclasses, integers_seen, out_dim
+    return Xm_t, yplain, classes_seen, p, n, nc, nclasses, integers_seen, outdim
 end
 
 function MMI.fitted_params(::LDA, (core_res, classes_seen))
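As the hunk above shows, leaving `outdim` at its sentinel value 0 falls back to min(p, nc - 1). A tiny illustrative sketch with made-up sizes:

    p, nc = 4, 3                  # 4 features, 3 classes seen in training
    def_outdim = min(p, nc - 1)   # == 2, used whenever outdim is left at 0
    # a user-supplied outdim larger than p raises the ArgumentError above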
@@ -187,13 +187,13 @@ http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.89.7068&rep=rep1&type=p
     method::Symbol = :gevd::(_ in (:gevd, :whiten))
     cov_w::CovarianceEstimator=MS.SimpleCovariance()
     cov_b::CovarianceEstimator=MS.SimpleCovariance()
-    out_dim::Int=0::(_ ≥ 0)
+    outdim::Int=0::(_ ≥ 0)
     regcoef::Float64=1e-6::(_ ≥ 0)
     priors::Union{Nothing, Vector{Float64}}=nothing
 end
 
 function MMI.fit(model::BayesianLDA, ::Int, X, y)
-    Xm_t, yplain, classes_seen, p, n, nc, nclasses, integers_seen, out_dim =
+    Xm_t, yplain, classes_seen, p, n, nc, nclasses, integers_seen, outdim =
         _check_lda_data(model, X, y)
     ## If piors are specified check if they makes sense.
     ## This was put here to through errors much earlier
@@ -204,7 +204,7 @@ function MMI.fit(model::BayesianLDA, ::Int, X, y)
     core_res = MS.fit(
         MS.MulticlassLDA, nc, Xm_t, Int.(yplain);
         method=model.method,
-        outdim=out_dim,
+        outdim,
         regcoef=model.regcoef,
         covestimator_within=model.cov_w,
         covestimator_between=model.cov_b
@@ -220,7 +220,7 @@ function MMI.fit(model::BayesianLDA, ::Int, X, y)
     cache = nothing
     report = (
         classes=classes_seen,
-        out_dim=MS.size(core_res)[2],
+        outdim=MS.size(core_res)[2],
         projected_class_means=MS.classmeans(core_res),
         mean=MS.mean(core_res),
         class_weights=MS.classweights(core_res),
@@ -336,12 +336,12 @@ IEEE Trans. Patt. Anal. & Mach. Int., 26: 995-1006.
 """
 @mlj_model mutable struct SubspaceLDA <: MMI.Probabilistic
     normalize::Bool = true
-    out_dim::Int = 0::(_ ≥ 0)
+    outdim::Int = 0::(_ ≥ 0)
     dist::SemiMetric = SqEuclidean()
 end
 
 function MMI.fit(model::SubspaceLDA, ::Int, X, y)
-    Xm_t, yplain, classes_seen, p, n, nc, nclasses, integers_seen, out_dim =
+    Xm_t, yplain, classes_seen, p, n, nc, nclasses, integers_seen, outdim =
         _check_lda_data(model, X, y)
 
     core_res = MS.fit(
@@ -361,18 +361,18 @@ function MMI.fit(model::SubspaceLDA, ::Int, X, y)
         class_weights=MS.classweights(core_res),
         nc=nc
     )
-    fitresult = (core_res, out_dim, classes_seen)
+    fitresult = (core_res, outdim, classes_seen)
     return fitresult, cache, report
 end
 
 function MMI.fitted_params(::SubspaceLDA, (core_res, _))
     return (projected_class_means=MS.classmeans(core_res), projection_matrix=MS.projection(core_res))
 end
 
-function MMI.predict(m::SubspaceLDA, (core_res, out_dim, classes_seen), Xnew)
+function MMI.predict(m::SubspaceLDA, (core_res, outdim, classes_seen), Xnew)
     # projection of Xnew, XWt is nt x o where o = number of out dims
     # nt = number ot test samples
-    proj = core_res.projw * view(core_res.projLDA, :, 1:out_dim) #proj is the projection_matrix
+    proj = core_res.projw * view(core_res.projLDA, :, 1:outdim) #proj is the projection_matrix
     XWt = MMI.matrix(Xnew) * proj
     # centroids in the transformed space, nc x o
     centroids = transpose(core_res.cmeans) * proj
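To make the shape bookkeeping in the predict hunk above concrete, here is a standalone sketch with synthetic sizes (plain arrays standing in for the fitted projections, not the package's own objects):

    nt, p, outdim = 10, 4, 2
    proj = randn(p, outdim)        # stands in for projw * projLDA[:, 1:outdim]
    Xnew = randn(nt, p)
    XWt  = Xnew * proj             # nt × outdim, matching the comment in the diff
    size(XWt) == (nt, outdim)      # true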
@@ -423,12 +423,12 @@ For more information about the algorithm, see the paper by Howland & Park (2006)
 """
 @mlj_model mutable struct BayesianSubspaceLDA <: MMI.Probabilistic
     normalize::Bool=false
-    out_dim::Int= 0::(_ ≥ 0)
+    outdim::Int= 0::(_ ≥ 0)
     priors::Union{Nothing, Vector{Float64}}=nothing
 end
 
 function MMI.fit(model::BayesianSubspaceLDA, ::Int, X, y)
-    Xm_t, yplain, classes_seen, p, n, nc, nclasses, integers_seen, out_dim =
+    Xm_t, yplain, classes_seen, p, n, nc, nclasses, integers_seen, outdim =
         _check_lda_data(model, X, y)
     ## If piors are specified check if they makes sense.
     ## This was put here to through errors much earlier
@@ -461,7 +461,7 @@ function MMI.fit(model::BayesianSubspaceLDA, ::Int, X, y)
         class_weights=MS.classweights(core_res),
         nc=nc
     )
-    fitresult = (core_res, out_dim, classes_seen, priors, n, mult)
+    fitresult = (core_res, outdim, classes_seen, priors, n, mult)
     return fitresult, cache, report
 end
 
@@ -479,13 +479,13 @@ end
 
 function MMI.predict(
     m::BayesianSubspaceLDA,
-    (core_res, out_dim, classes_seen, priors, n, mult),
+    (core_res, outdim, classes_seen, priors, n, mult),
     Xnew
 )
     # projection of Xnew, XWt is nt x o where o = number of out dims
     # nt = number ot test samples
     #proj is the projection_matrix
-    proj = core_res.projw * view(core_res.projLDA, :, 1:out_dim)
+    proj = core_res.projw * view(core_res.projLDA, :, 1:outdim)
     XWt = MMI.matrix(Xnew) * proj
 
     # centroids in the transformed space, nc x o
@@ -510,9 +510,9 @@ function MMI.predict(
     return MMI.UnivariateFinite(classes_seen, Pr)
 end
 
-function MMI.transform(m::T, (core_res, out_dim, _), X) where T<:Union{SubspaceLDA, BayesianSubspaceLDA}
+function MMI.transform(m::T, (core_res, outdim, _), X) where T<:Union{SubspaceLDA, BayesianSubspaceLDA}
     # projection of X, XWt is nt x o where o = out dims
-    proj = core_res.projw * view(core_res.projLDA, :, 1:out_dim)
+    proj = core_res.projw * view(core_res.projLDA, :, 1:outdim)
     #proj is the projection_matrix
     XWt = MMI.matrix(X) * proj
     return MMI.table(XWt, prototype = X)
