@@ -78,16 +78,16 @@ function PolyConfig(g::Polynomial{T}, reduced_exponents::Matrix{UInt16}, big_loo
7878 zeros (promote_type (T, S), n))
7979end
8080
81- function Base. deepcopy (cfg:: PolyConfig )
82- PolyConfig (
83- deepcopy (cfg. monomials_delimiters),
84- deepcopy (cfg. monomials),
85- deepcopy (cfg. grad_monomials_delimiters),
86- deepcopy (cfg. grad_monomials),
87- deepcopy (cfg. reduced_exponents_delimiters),
88- deepcopy (cfg. reduced_exponents_map),
89- deepcopy (cfg. reduced_values))
90- end
81+ # function Base.deepcopy(cfg::PolyConfig)
82+ # PolyConfig(
83+ # deepcopy(cfg.monomials_delimiters),
84+ # deepcopy(cfg.monomials),
85+ # deepcopy(cfg.grad_monomials_delimiters),
86+ # deepcopy(cfg.grad_monomials),
87+ # deepcopy(cfg.reduced_exponents_delimiters),
88+ # deepcopy(cfg.reduced_exponents_map),
89+ # deepcopy(cfg.reduced_values))
90+ # end
9191
9292@inline function fillreduced_values! (
9393 cfg:: PolyConfig{T} ,
@@ -105,21 +105,21 @@ end
105105 k = 1
106106 j = 1
107107 nextj_at = j + reds[j]
108- res = cfs[j]
108+ @inbounds res = cfs[j]
109109 while k ≤ N || j < n
110110 togo = nextj_at - k
111111 if togo == 0
112- v[j] = res
112+ @inbounds v[j] = res
113113 j += 1
114- nextj_at += reds[j]
115- res = cfs[j]
114+ @inbounds nextj_at += reds[j]
115+ @inbounds res = cfs[j]
116116 else
117- i, l = rem[k]
118- res *= dv[i, l]
117+ @inbounds i, l = rem[k]
118+ @inbounds res *= dv[i, l]
119119 k += 1
120120 end
121121 end
122- v[j] = res
122+ @inbounds v[j] = res
123123 v
124124end
125125
@@ -133,14 +133,14 @@ end
133133 return values
134134 end
135135 for i= 1 : m
136- l = diffs[i,1 ]
136+ @inbounds l = diffs[i,1 ]
137137 xi = xs[i]
138138 v = l == 0 ? one (T) : pow (xi, l)
139- values[i, 1 ] = v
139+ @inbounds values[i, 1 ] = v
140140 for k= 2 : n
141- l = diffs[i,k]
141+ @inbounds l = diffs[i,k]
142142 v = l == 0 ? v : v * pow (xi, l)
143- values[i,k] = v
143+ @inbounds values[i,k] = v
144144 end
145145 end
146146 nothing
@@ -155,15 +155,15 @@ end
155155 n = length (mds)
156156 k = 1
157157 j = 1
158- nextj_at = j + mds[j]
159- res = v[j]
158+ @inbounds nextj_at = j + mds[j]
159+ @inbounds res = v[j]
160160 out = zero (T)
161161 while k ≤ N || j < n
162162 if k == nextj_at
163163 out += res
164164 j += 1
165- nextj_at += mds[j]
166- res = v[j]
165+ @inbounds nextj_at += mds[j]
166+ @inbounds res = v[j]
167167 else
168168 res *= x[ms[k]]
169169 k += 1
180180 gms = cfg. grad_monomials
181181
182182 for i= 1 : length (gmds)
183- u[i] = _gradient (v, gmds[i], gms[i], x)
183+ @inbounds ui = _gradient (v, gmds[i], gms[i], x)
184+ u[i] = ui
184185 end
185186 u
186187end
@@ -195,7 +196,8 @@ Store the gradient in the i-th row of 'u'.
195196 gms = cfg. grad_monomials
196197
197198 for j= 1 : length (gmds)
198- u[i, j] = _gradient (v, gmds[j], gms[j], x)
199+ @inbounds uij = _gradient (v, gmds[j], gms[j], x)
200+ u[i, j] = uij
199201 end
200202 u
201203end
@@ -205,17 +207,17 @@ function _gradient(v::AbstractVector{T}, mds, ms, x) where T
205207 n = length (mds)
206208 k = 1
207209 j = 1
208- delim, exponent = mds[j]
210+ @inbounds delim, exponent = mds[j]
209211 nextj_at = j + delim
210- res = v[j] * exponent
212+ @inbounds res = v[j] * exponent
211213 out = zero (T)
212214 while k ≤ N || j < n
213215 if k == nextj_at
214216 out += res
215217 j += 1
216- delim, exponent = mds[j]
218+ @inbounds delim, exponent = mds[j]
217219 nextj_at += delim
218- res = exponent == 0 ? zero (T) : v[j] * exponent
220+ @inbounds res = exponent == 0 ? zero (T) : v[j] * exponent
219221 else
220222 res *= x[ms[k]]
221223 k += 1
@@ -263,12 +265,12 @@ function differences(f::Polynomial{T}, ::Type{S}) where {T, S}
263265end
264266
265267
266- function Base. deepcopy (cfg:: GradientConfig )
267- GradientConfig (
268- deepcopy (cfg. poly),
269- deepcopy (cfg. differences),
270- deepcopy (cfg. differences_values))
271- end
268+ # function Base.deepcopy(cfg::GradientConfig)
269+ # GradientConfig(
270+ # deepcopy(cfg.poly),
271+ # deepcopy(cfg.differences),
272+ # deepcopy(cfg.differences_values))
273+ # end
272274
273275"""
274276 evaluate(g, x, cfg::GradientConfig [, precomputed=false])