Skip to content

Commit 5333367

Browse files
committed
remove redundant constraints in LAD and Quantile Regression
1 parent 196f3c5 commit 5333367

File tree

3 files changed

+6
-10
lines changed

3 files changed

+6
-10
lines changed

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,8 @@
33
- Update citation of `robhatreg` a.k.a Robust Hat Matrix based Regression Estimator
44
- Fix typos in code, code comments, and documentation
55
- Replace one dimensional `Array{` definitions with `Vector{`
6+
- Define deviations and their bounds in a single line and remove additional constraints in LAD and Quantile Regression
7+
68

79
# v0.11.5
810

src/lad.jl

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -106,7 +106,8 @@ function lad_exact(X::AbstractMatrix{Float64}, y::AbstractVector{Float64})
106106
m = JuMP.Model(HiGHS.Optimizer)
107107
set_silent(m)
108108

109-
JuMP.@variable(m, d[1:(2n)])
109+
# d[i] >= 0 for i = 1, ..., 2n
110+
JuMP.@variable(m, d[1:(2n)] .>= 0)
110111
JuMP.@variable(m, beta[1:p])
111112

112113
JuMP.@objective(m, Min, sum(d[i] for i = 1:(2n)))
@@ -115,10 +116,6 @@ function lad_exact(X::AbstractMatrix{Float64}, y::AbstractVector{Float64})
115116
c = JuMP.@constraint(m, y[i] - sum(X[i, :] .* beta) + d[i] - d[n+i] == 0)
116117
end
117118

118-
for i = 1:(2n)
119-
JuMP.@constraint(m, d[i] >= 0)
120-
end
121-
122119
JuMP.optimize!(m)
123120

124121
betahats = JuMP.value.(beta)

src/quantileregression.jl

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,8 @@ function quantileregression(X::AbstractMatrix{Float64}, y::AbstractVector{Float6
8686
m = JuMP.Model(HiGHS.Optimizer)
8787
set_silent(m)
8888

89-
JuMP.@variable(m, d[1:(2n)])
89+
# d[i] >= 0 for i = 1, ..., 2n
90+
JuMP.@variable(m, d[1:(2n)] .>= 0)
9091
JuMP.@variable(m, beta[1:p])
9192

9293
JuMP.@objective(
@@ -99,10 +100,6 @@ function quantileregression(X::AbstractMatrix{Float64}, y::AbstractVector{Float6
99100
_ = JuMP.@constraint(m, y[i] - sum(X[i, :] .* beta) + d[i] - d[n+i] == 0)
100101
end
101102

102-
for i = 1:(2n)
103-
JuMP.@constraint(m, d[i] >= 0)
104-
end
105-
106103
JuMP.optimize!(m)
107104

108105
betahats = JuMP.value.(beta)

0 commit comments

Comments
 (0)