14 changes: 3 additions & 11 deletions Project.toml
@@ -1,7 +1,7 @@
name = "MixedModels"
uuid = "ff71e718-51f3-5ec2-a782-8ffcbfa3c316"
author = ["Phillip Alday <[email protected]>", "Douglas Bates <[email protected]>"]
version = "4.35.2"
version = "4.36.0"

[deps]
Arrow = "69666777-d1a9-59fb-9406-91d4454c9d45"
@@ -14,11 +14,11 @@ JSON3 = "0f8b85d8-7281-11e9-16c2-39a750bddbf1"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
Markdown = "d6f4376e-aef5-505a-96c1-9c027394607a"
MixedModelsDatasets = "7e9fb7ac-9f67-43bf-b2c8-96ba0796cbb6"
NLopt = "76087f3c-5699-56af-9a33-bf431cd00edd"
PooledArrays = "2dfb63ee-cc39-5dd5-95bd-886bf059d720"
PrecompileTools = "aea7be01-6a6a-4083-8856-8a6e6704d82a"
ProgressMeter = "92933f4c-e287-5a05-a399-4b506db050ca"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
PRIMA = "0a7d04aa-8ac2-47b3-b7a7-9dbd6ad661ed"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
@@ -30,12 +30,6 @@ StructTypes = "856f2bd8-1eba-4b0a-8007-ebc267875bd4"
Tables = "bd369af6-aec1-5ad0-b16a-f7cc5008161c"
TypedTables = "9d95f2ec-7b3d-5a63-8d20-e2491e220bb9"

[weakdeps]
PRIMA = "0a7d04aa-8ac2-47b3-b7a7-9dbd6ad661ed"

[extensions]
MixedModelsPRIMAExt = ["PRIMA"]

[compat]
Aqua = "0.8"
Arrow = "1, 2"
@@ -51,7 +45,6 @@ JSON3 = "1"
LinearAlgebra = "1"
Markdown = "1"
MixedModelsDatasets = "0.1"
NLopt = "0.6, 1"
PRIMA = "0.2"
PooledArrays = "0.5, 1"
PrecompileTools = "1"
@@ -77,10 +70,9 @@ Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
ExplicitImports = "7d51a73a-1435-4ff3-83d9-f097790105c7"
InteractiveUtils = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
PRIMA = "0a7d04aa-8ac2-47b3-b7a7-9dbd6ad661ed"
StableRNGs = "860ef19b-820b-49d6-a774-d7a799459cd3"
Suppressor = "fd094767-a336-5f1f-9728-57cf17d0bbfb"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
test = ["Aqua", "DataFrames", "ExplicitImports", "InteractiveUtils", "PRIMA", "StableRNGs", "Suppressor", "Test"]
test = ["Aqua", "DataFrames", "ExplicitImports", "InteractiveUtils", "StableRNGs", "Suppressor", "Test"]
Binary file added fitlogs/Apple_M_series/AppleAccelerate/d3_1.arrow
Binary file added fitlogs/Apple_M_series/OpenBLAS/dyestuff.arrow
Binary file added fitlogs/Apple_M_series/OpenBLAS/pastes_1.arrow
Binary file added fitlogs/Apple_M_series/OpenBLAS/pastes_2.arrow
2 changes: 2 additions & 0 deletions issues/705/Project.toml
@@ -0,0 +1,2 @@
[deps]
MixedModels = "ff71e718-51f3-5ec2-a782-8ffcbfa3c316"
25 changes: 12 additions & 13 deletions src/MixedModels.jl
@@ -20,8 +20,8 @@ using LinearAlgebra: ldiv!, lmul!, logdet, mul!, norm, normalize, normalize!, qr
using LinearAlgebra: rank, rdiv!, rmul!, svd, tril!
using Markdown: Markdown
using MixedModelsDatasets: dataset, datasets
using NLopt: NLopt, Opt
using PooledArrays: PooledArrays, PooledArray
using PRIMA
using PrecompileTools: PrecompileTools, @setup_workload, @compile_workload
using ProgressMeter: ProgressMeter, Progress, ProgressUnknown, finish!, next!
using Random: Random, AbstractRNG, randn!
@@ -128,10 +128,10 @@ export @formula,
parametricbootstrap,
pirls!,
predict,
profile,
profileσ,
profilesigma,
profilevc,
# profile,
# profileσ,
# profilesigma,
# profilevc,
pvalue,
pwrss,
ranef,
@@ -212,13 +212,12 @@ include("blockdescription.jl")
include("grouping.jl")
include("mimeshow.jl")
include("serialization.jl")
include("profile/profile.jl")
include("nlopt.jl")
#include("profile/profile.jl")
include("prima.jl")

# aliases with non-unicode function names
const settheta! = setθ!
const profilesigma = profileσ
# const profilesigma = profileσ

# COV_EXCL_START
@setup_workload begin
@@ -240,11 +239,11 @@ const profilesigma = profileσ
show(io, m)
show(io, m.PCA.subj)
show(io, m.rePCA)
fit(MixedModel,
@formula(use ~ 1 + age + abs2(age) + urban + livch + (1 | urban & dist)),
contra,
Bernoulli();
progress)
# fit(MixedModel,
# @formula(use ~ 1 + age + abs2(age) + urban + livch + (1 | urban & dist)),
# contra,
# Bernoulli();
# progress)
end
end
# COV_EXCL_STOP
50 changes: 47 additions & 3 deletions src/linearmixedmodel.jl
@@ -496,10 +496,13 @@ function StatsAPI.fit!(
end

xmin, fmin = optimize!(m; progress, fitlog)
setθ!(m, xmin) # ensure that the parameters saved in m are xmin
rectify!(m) # flip signs of columns of m.λ elements with negative diagonal els
getθ!(xmin, m) # use the rectified values as xmin

## check if small non-negative parameter values can be set to zero
xmin_ = copy(xmin)
lb = optsum.lowerbd
lb = optsum.lowerbd # FIXME: this will not exist in the future; check for on-diagonal elements instead
for i in eachindex(xmin_)
if iszero(lb[i]) && zero(T) < xmin_[i] < optsum.xtol_zero_abs
xmin_[i] = zero(T)
@@ -509,21 +512,34 @@
if (zeroobj = objective!(m, xmin_)) ≤ (fmin + optsum.ftol_zero_abs)
fmin = zeroobj
copyto!(xmin, xmin_)
fitlog && push!(optsum.fitlog, (copy(xmin), fmin))
fitlog && append!(push!(optsum.fitlog, fmin), xmin)
end
end

# unlike GLMM we don't need to populate optsum.finitial here
# because we do that during the initial guess and rescale check

## ensure that the parameter values saved in m are xmin
objective!(m)(xmin)
objective!(m, xmin)

optsum.final = xmin
optsum.fmin = fmin
return m
end

"""
fitlog(m::LinearMixedModel{T})

Extract `m.optsum.fitlog` as a `Tables.MatrixTable`
"""
function fitlog(m::LinearMixedModel)
header = append!([:objective], Symbol.("p_", join.(m.parmap, "_")))
return Tables.table(
collect(transpose(reshape(m.optsum.fitlog, length(m.optsum.initial) + 1, :)));
header,
)
end
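
The flattened `fitlog` layout can be non-obvious, so here is a minimal sketch of how the vector maps onto the table produced by the `fitlog` accessor above. The values and the two-parameter model are made up for illustration and the column names `p_1`/`p_2` are placeholders; the real accessor builds them from `m.parmap`.

```julia
using Tables

# Each objective evaluation appends the objective value followed by the
# parameter vector, so the log is a flat Vector{Float64}.
flat = [327.3, 1.0, 0.5,    # evaluation 1: objective, θ₁, θ₂
        301.9, 1.2, 0.4,    # evaluation 2
        297.0, 1.3, 0.35]   # evaluation 3
npar = 2                    # length(m.optsum.initial) in the real accessor

# One row per evaluation: reshape into (npar + 1) rows, then transpose.
tbl = Tables.table(
    collect(transpose(reshape(flat, npar + 1, :)));
    header=[:objective, :p_1, :p_2],
)

Tables.columntable(tbl).objective  # 327.3, 301.9, 297.0
```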

"""
fitted!(v::AbstractArray{T}, m::LinearMixedModel{T})

@@ -954,6 +970,34 @@ function PCA(m::LinearMixedModel; corr::Bool=true)
return NamedTuple{_unique_fnames(m)}(PCA.(m.reterms, corr=corr))
end

"""
rectify!(m::LinearMixedModel)

For each element of m.λ, check for negative values on the diagonal and flip the signs of the entire column when any are present.

This provides a canonical converged value of θ. We use unconstrained optimization followed by this reassignment to avoid the
hassle of constrained optimization.
"""
function rectify!(m::LinearMixedModel)
rectify!.(m.λ)
return m
end

function rectify!(λ::LowerTriangular)
for (j,c) in enumerate(eachcol(λ.data))
if c[j] < 0
c .*= -1
end
end
return λ
end

function rectify!(λ::Diagonal)
d = λ.diag
map!(abs, d, d)
return λ
end
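
As a worked illustration of the column flip described in the `rectify!` docstring, here is a standalone sketch with made-up values that reproduces the same loop rather than calling the internal function:

```julia
using LinearAlgebra

λ = LowerTriangular([1.0  0.0
                     0.3 -0.5])   # negative diagonal entry in column 2

for (j, c) in enumerate(eachcol(λ.data))
    if c[j] < 0
        c .*= -1                  # flip the sign of the whole column
    end
end

λ        # column 2 is now [0.0, 0.5]
λ * λ'   # unchanged by the flip, so the implied covariance is the same
```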

"""
reevaluateAend!(m::LinearMixedModel)

122 changes: 0 additions & 122 deletions src/nlopt.jl

This file was deleted.

38 changes: 5 additions & 33 deletions src/optsummary.jl
@@ -33,7 +33,7 @@ Summary of an optimization
* `rhoend`: as in PRIMA, not used in NLopt

## MixedModels-specific fields, unrelated to the optimizer
* `fitlog`: A vector of tuples of parameter and objectives values from steps in the optimization
* `fitlog`: a `Vector{T}` recording the objective values and parameter vectors from the optimization
* `nAGQ`: number of adaptive Gauss-Hermite quadrature points in deviance evaluation for GLMMs
* `REML`: use the REML criterion for LMM fits
* `sigma`: a priori value for the residual standard deviation for LMM
@@ -60,8 +60,8 @@ Base.@kwdef mutable struct OptSummary{T<:AbstractFloat}
ftol_zero_abs::T = eltype(initial)(1.e-5)
maxfeval::Int = -1

optimizer::Symbol = :LN_BOBYQA
backend::Symbol = :nlopt
optimizer::Symbol = :newuoa
backend::Symbol = :prima

# the @kwdef macro isn't quite smart enough for us to use the type parameter
# for the default values, but we can fake it
@@ -76,7 +76,7 @@ Base.@kwdef mutable struct OptSummary{T<:AbstractFloat}
rhoend::T = rhobeg / 1_000_000

# not SVector because we would need to parameterize on size (which breaks GLMM)
fitlog::Vector{Tuple{Vector{T},T}} = Vector{Tuple{Vector{T},T}}()
fitlog::Vector{T} = T[]
nAGQ::Int = 1
REML::Bool = false
sigma::Union{T,Nothing} = nothing
@@ -85,40 +85,12 @@ end
function OptSummary(
initial::Vector{T},
lowerbd::Vector{S},
optimizer::Symbol=:LN_BOBYQA; kwargs...,
optimizer::Symbol=:newuoa; kwargs...,
) where {T<:AbstractFloat,S<:AbstractFloat}
TS = promote_type(T, S)
return OptSummary{TS}(; initial, lowerbd, optimizer, kwargs...)
end
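
A minimal sketch of what the new defaults look like in practice, assuming only the constructor and `@kwdef` defaults shown in this diff (the initial values and lower bounds are made up):

```julia
using MixedModels

opt = MixedModels.OptSummary([1.0, 0.5], [0.0, 0.0])

opt.optimizer  # :newuoa  (was :LN_BOBYQA)
opt.backend    # :prima   (was :nlopt)
opt.fitlog     # Float64[] -- flat vector, no longer a Vector of (θ, objective) tuples
```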

"""
columntable(s::OptSummary, [stack::Bool=false])

Return `s.fitlog` as a `Tables.columntable`.

When `stack` is false (the default), there will be 3 columns in the result:
- `iter`: the iteration number
- `objective`: the value of the objective at that sample
- `θ`: the parameter vector at that sample

When `stack` is true, there will be 4 columns: `iter`, `objective`, `par`, and `value`
where `value` is the stacked contents of the `θ` vectors (the equivalent of `vcat(θ...)`)
and `par` is a vector of parameter numbers.
"""
function Tables.columntable(s::OptSummary; stack::Bool=false)
fitlog = s.fitlog
val = (; iter=axes(fitlog, 1), objective=last.(fitlog), θ=first.(fitlog))
stack || return val
θ1 = first(val.θ)
k = length(θ1)
return (;
iter=repeat(val.iter; inner=k),
objective=repeat(val.objective; inner=k),
par=repeat(1:k; outer=length(fitlog)),
value=foldl(vcat, val.θ; init=(eltype(θ1))[]),
)
end

function Base.show(io::IO, ::MIME"text/plain", s::OptSummary)
println(io, "Initial parameter vector: ", s.initial)
println(io, "Initial objective value: ", s.finitial)