Skip to content

Commit 73bafed

Browse files
authored
Upgrade ensmallen to 2.11.3 (#24)
* Upgrade to ensmallen 2.11.3 * Update NEWS with upstream entries. * Bump version * Add ChangeLog entry * Update submission comments
1 parent 834265f commit 73bafed

37 files changed

+173
-32
lines changed

ChangeLog

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,13 @@
1-
2019-01-14 James Balamuta <[email protected]>
1+
2020-02-19 James Balamuta <[email protected]>
2+
3+
* DESCRIPTION (Version): Release 2.11.3
4+
5+
* NEWS.md: Update for Ensmallen release 2.11.3
6+
7+
* inst/include/ensmallen_bits: Upgraded to Ensmallen 2.11.3
8+
* inst/include/ensmallen.hpp: ditto
9+
10+
2020-01-14 James Balamuta <[email protected]>
211

312
* inst/include/RcppEnsmallen.h: Removed header ordering checks
413

DESCRIPTION

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
Package: RcppEnsmallen
22
Title: Header-Only C++ Mathematical Optimization Library for 'Armadillo'
3-
Version: 0.2.11.1.1
3+
Version: 0.2.11.3.1
44
Authors@R: c(
55
person("James Joseph", "Balamuta", email = "[email protected]",
66
role = c("aut", "cre", "cph"),

NEWS.md

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,19 @@
1+
# RcppEnsmallen 0.2.11.3.1
2+
3+
- Upgraded to ensmallen 2.11.3: "The Poster Session Is Full" (2020-02-19)
4+
- Prevent spurious compiler warnings
5+
([#161](https://github.com/mlpack/ensmallen/pull/161)).
6+
- Fix minor memory leaks
7+
([#167](https://github.com/mlpack/ensmallen/pull/167)).
8+
- Revamp CMake configuration
9+
([#152](https://github.com/mlpack/ensmallen/pull/152)).
10+
- Allow callback instantiation for SGD based optimizer
11+
([#138](https://github.com/mlpack/ensmallen/pull/138)).
12+
- Minor test stability fixes on i386
13+
([#156](https://github.com/mlpack/ensmallen/pull/156)).
14+
- Fix Lookahead MaxIterations() check
15+
([#159](https://github.com/mlpack/ensmallen/pull/159)).
16+
117
# RcppEnsmallen 0.2.11.1.1
218

319
- Upgraded to ensmallen 2.11.1: "The Poster Session Is Full" (2019-12-28)

cran-comments.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
## Test environments
22

3-
* local OS X install, R 3.6.1
4-
* ubuntu 14.04 (on travis-ci), R 3.6.1
3+
* local macOS install, R 3.6.2
4+
* ubuntu 16.04 (with GitHub Actions), R 3.6.2
55
* win-builder (devel and release)
66

77
## R CMD check results

inst/include/ensmallen_bits/ada_bound/ada_bound.hpp

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -113,8 +113,9 @@ class AdaBoundType
113113
MatType& iterate,
114114
CallbackTypes&&... callbacks)
115115
{
116-
return optimizer.template Optimize<DecomposableFunctionType, MatType, GradType,
117-
CallbackTypes...>(function, iterate, callbacks...);
116+
return optimizer.template Optimize<DecomposableFunctionType, MatType,
117+
GradType, CallbackTypes...>(function, iterate,
118+
std::forward<CallbackTypes>(callbacks)...);
118119
}
119120

120121
//! Forward the MatType as GradType.

inst/include/ensmallen_bits/ada_delta/ada_delta.hpp

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,8 @@ class AdaDelta
105105
CallbackTypes&&... callbacks)
106106
{
107107
return optimizer.Optimize<SeparableFunctionType, MatType, GradType,
108-
CallbackTypes...>(function, iterate, callbacks...);
108+
CallbackTypes...>(function, iterate,
109+
std::forward<CallbackTypes>(callbacks)...);
109110
}
110111

111112
//! Forward the MatType as GradType.

inst/include/ensmallen_bits/ada_grad/ada_grad.hpp

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -101,7 +101,8 @@ class AdaGrad
101101
CallbackTypes&&... callbacks)
102102
{
103103
return optimizer.Optimize<SeparableFunctionType, MatType, GradType,
104-
CallbackTypes...>(function, iterate, callbacks...);
104+
CallbackTypes...>(function, iterate,
105+
std::forward<CallbackTypes>(callbacks)...);
105106
}
106107

107108
//! Forward the MatType as GradType.

inst/include/ensmallen_bits/adam/adam.hpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ class AdamType
128128
{
129129
return optimizer.template Optimize<
130130
SeparableFunctionType, MatType, GradType, CallbackTypes...>(
131-
function, iterate, callbacks...);
131+
function, iterate, std::forward<CallbackTypes>(callbacks)...);
132132
}
133133

134134
//! Forward the MatType as GradType.

inst/include/ensmallen_bits/bigbatch_sgd/bigbatch_sgd.hpp

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -101,6 +101,12 @@ class BigBatchSGD
101101
const double tolerance = 1e-5,
102102
const bool shuffle = true,
103103
const bool exactObjective = false);
104+
105+
/**
106+
* Clean any memory associated with the BigBatchSGD object.
107+
*/
108+
~BigBatchSGD();
109+
104110
/**
105111
* Optimize the given function using big-batch SGD. The given starting point
106112
* will be modified to store the finishing point of the algorithm, and the

inst/include/ensmallen_bits/bigbatch_sgd/bigbatch_sgd_impl.hpp

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,12 @@ BigBatchSGD<UpdatePolicyType>::BigBatchSGD(
3838
updatePolicy(UpdatePolicyType())
3939
{ /* Nothing to do. */ }
4040

41+
template<typename UpdatePolicyType>
42+
BigBatchSGD<UpdatePolicyType>::~BigBatchSGD()
43+
{
44+
instUpdatePolicy.Clean();
45+
}
46+
4147
//! Optimize the function (minimize).
4248
template<typename UpdatePolicyType>
4349
template<typename SeparableFunctionType,

0 commit comments

Comments
 (0)