-""" ReHLine: Regularized Composite ReHU/ReLU Loss Minimization """
+""" ReHLine: Regularized Composite ReLU-ReHU Loss Minimization with Linear Computation and Linear Convergence """
 
 # Authors: Ben Dai <[email protected]>
-# C++ support by Yixuan Qiu <[email protected]>
+# C++ support by Yixuan Qiu <[email protected]>
 
 # License: MIT License
 
 import numpy as np
 from sklearn.base import BaseEstimator
-import rehline
 from sklearn.utils.validation import check_X_y, check_array, check_is_fitted
-import base
+from .base import relu, rehu
+from ._internal import rehline_internal, rehline_result
 
 def ReHLine_solver(X, U, V,
                    Tau=np.empty(shape=(0, 0)),
                    S=np.empty(shape=(0, 0)), T=np.empty(shape=(0, 0)),
                    A=np.empty(shape=(0, 0)), b=np.empty(shape=(0)),
                    max_iter=1000, tol=1e-4, shrink=True, verbose=True):
-    result = rehline.rehline_result()
-    rehline.rehline_internal(result, X, A, b, U, V, S, T, Tau, max_iter, tol, shrink, verbose)
+    result = rehline_result()
+    rehline_internal(result, X, A, b, U, V, S, T, Tau, max_iter, tol, shrink, verbose)
     return result
 
 class ReHLine(BaseEstimator):
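For orientation, here is a minimal usage sketch of the `ReHLine_solver` wrapper above; it is not part of the commit. It assumes the ReHLine convention in which a hinge loss C * max(0, 1 - y_i * x_i' beta) is encoded as a single ReLU component with u_{1i} = -C * y_i and v_{1i} = C, that U and V have shape (L, n), and that the returned result object exposes the solution as a `beta` attribute.

import numpy as np

# Hypothetical smoke test: an SVM-style problem with a single ReLU component.
n, d, C = 200, 5, 1.0
rng = np.random.default_rng(0)
X = rng.standard_normal((n, d))
y = np.where(rng.standard_normal(n) > 0, 1.0, -1.0)

U = -(C * y).reshape(1, -1)   # shape (L, n) = (1, n): slopes u_{li}
V = C * np.ones((1, n))       # shape (L, n) = (1, n): intercepts v_{li}

res = ReHLine_solver(X, U, V, max_iter=1000, tol=1e-4, verbose=False)
beta = res.beta               # assumed attribute holding the fitted coefficients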
@@ -193,7 +193,7 @@ def call_ReLHLoss(self, input):
         relu_input = (self.U.T * input[:,np.newaxis]).T + self.V
         if self.H > 0:
             rehu_input = (self.S.T * input[:,np.newaxis]).T + self.T
-        return np.sum(base.relu(relu_input), 0) + np.sum(base.rehu(rehu_input), 0)
+        return np.sum(relu(relu_input), 0) + np.sum(rehu(rehu_input), 0)
 
 
     def fit(self, X, sample_weight=None):
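The corrected return line above sums elementwise `relu` and `rehu` terms over the first axis. As a reading aid, here is a sketch of what the imported `.base` helpers are assumed to compute under the ReLU-ReHU decomposition, with the rectified Huber defaulting to tau = 1; the actual `base` module may differ.

import numpy as np

def relu(z):
    # Positive part, applied elementwise: max(z, 0).
    return np.maximum(z, 0)

def rehu(z, tau=1.0):
    # Rectified Huber: 0 for z <= 0, z**2 / 2 for 0 < z <= tau,
    # and the linear tail tau * (z - tau / 2) for z > tau.
    z = np.maximum(z, 0)
    return np.where(z <= tau, z**2 / 2, tau * (z - tau / 2))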