@@ -154,12 +154,19 @@ it = (xᵢ for xᵢ in x)


 # #### entropy
-@test entropy([0.5, 0.5]) ≈ 0.6931471805599453
-@test entropy([0.2, 0.3, 0.5]) ≈ 1.0296530140645737
+@test @inferred(entropy([0.5, 0.5])) ≈ 0.6931471805599453
+@test @inferred(entropy([1//2, 1//2])) ≈ 0.6931471805599453
+@test @inferred(entropy([0.5f0, 0.5f0])) isa Float32
+@test @inferred(entropy([0.2, 0.3, 0.5])) ≈ 1.0296530140645737
+@test iszero(@inferred(entropy([0, 1])))
+@test iszero(@inferred(entropy([0.0, 1.0])))

-@test entropy([0.5, 0.5],2) ≈ 1.0
-@test entropy([0.2, 0.3, 0.5], 2) ≈ 1.4854752972273344
-@test entropy([1.0, 0.0]) ≈ 0.0
+@test @inferred(entropy([0.5, 0.5], 2)) ≈ 1.0
+@test @inferred(entropy([1//2, 1//2], 2)) ≈ 1.0
+@test @inferred(entropy([0.2, 0.3, 0.5], 2)) ≈ 1.4854752972273344
+
+@test_throws ArgumentError @inferred(entropy(Float64[]))
+@test_throws ArgumentError @inferred(entropy(Int[]))

 # #### Renyi entropies
 # Generate a random probability distribution
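
For reference, the expected constants in the entropy hunk above are plain Shannon entropy: `entropy(p)` computes H(p) = -∑ pᵢ log(pᵢ) with the natural logarithm, and `entropy(p, b)` uses log base `b`. A minimal plain-Julia sketch of where the values come from (independent of StatsBase; the variable name `p` is illustrative):

```julia
p = [0.2, 0.3, 0.5]

-sum(x * log(x) for x in p)           # ≈ 1.0296530140645737
-sum(x * log(x) for x in [0.5, 0.5])  # ≈ 0.6931471805599453, i.e. log(2)
-sum(x * log(2, x) for x in p)        # base 2: ≈ 1.4854752972273344
```

The new `[0, 1]` cases exercise the 0 · log(0) = 0 convention: a naive `-sum(p .* log.(p))` produces NaN for a zero entry (`0.0 * -Inf`), so the implementation must special-case zeros. The Rényi context line at the top of the next hunk checks the related scaling identity `renyientropy(scale * udist, order) ≈ renyientropy(udist, order) - log(scale)` for a normalized `udist`.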
@@ -200,12 +207,31 @@ scale = rand()
 @test renyientropy(udist * scale, order) ≈ renyientropy(udist, order) - log(scale)

 # #### Cross entropy
-@test crossentropy([0.2, 0.3, 0.5], [0.3, 0.4, 0.3]) ≈ 1.1176681825904018
-@test crossentropy([0.2, 0.3, 0.5], [0.3, 0.4, 0.3], 2) ≈ 1.6124543443825532
+@test @inferred(crossentropy([0.2, 0.3, 0.5], [0.3, 0.4, 0.3])) ≈ 1.1176681825904018
+@test @inferred(crossentropy([1//5, 3//10, 1//2], [0.3, 0.4, 0.3])) ≈ 1.1176681825904018
+@test @inferred(crossentropy([1//5, 3//10, 1//2], [0.3f0, 0.4f0, 0.3f0])) isa Float32
+@test @inferred(crossentropy([0.2, 0.3, 0.5], [0.3, 0.4, 0.3], 2)) ≈ 1.6124543443825532
+@test @inferred(crossentropy([1//5, 3//10, 1//2], [0.3, 0.4, 0.3], 2)) ≈ 1.6124543443825532
+@test @inferred(crossentropy([1//5, 3//10, 1//2], [0.3f0, 0.4f0, 0.3f0], 2f0)) isa Float32
+
+# deprecated, should throw an `ArgumentError` at some point
+logpattern = (:warn, "support for empty collections will be removed since they do not represent proper probability distributions")
+@test iszero(@test_logs logpattern @inferred(crossentropy(Float64[], Float64[])))
+@test iszero(@test_logs logpattern @inferred(crossentropy(Int[], Int[])))

 # #### KL divergence
-@test kldivergence([0.2, 0.3, 0.5], [0.3, 0.4, 0.3]) ≈ 0.08801516852582819
-@test kldivergence([0.2, 0.3, 0.5], [0.3, 0.4, 0.3], 2) ≈ 0.12697904715521868
+@test @inferred(kldivergence([0.2, 0.3, 0.5], [0.3, 0.4, 0.3])) ≈ 0.08801516852582819
+@test @inferred(kldivergence([1//5, 3//10, 1//2], [0.3, 0.4, 0.3])) ≈ 0.08801516852582819
+@test @inferred(kldivergence([1//5, 3//10, 1//2], [0.3f0, 0.4f0, 0.3f0])) isa Float32
+@test @inferred(kldivergence([0.2, 0.3, 0.5], [0.3, 0.4, 0.3], 2)) ≈ 0.12697904715521868
+@test @inferred(kldivergence([1//5, 3//10, 1//2], [0.3, 0.4, 0.3], 2)) ≈ 0.12697904715521868
+@test @inferred(kldivergence([1//5, 3//10, 1//2], [0.3f0, 0.4f0, 0.3f0], 2f0)) isa Float32
+@test iszero(@inferred(kldivergence([0, 1], [0f0, 1f0])))
+
+# deprecated, should throw an `ArgumentError` at some point
+logpattern = (:warn, "support for empty collections will be removed since they do not represent proper probability distributions")
+@test iszero(@test_logs logpattern @inferred(kldivergence(Float64[], Float64[])))
+@test iszero(@test_logs logpattern @inferred(kldivergence(Int[], Int[])))

 # #### summarystats

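
The cross-entropy and KL expectations in this hunk follow from H(p, q) = -∑ pᵢ log(qᵢ) and KL(p ‖ q) = ∑ pᵢ log(pᵢ / qᵢ), each divided by log(b) when a base is given; the two are linked by KL(p ‖ q) = H(p, q) - H(p). A quick plain-Julia check of the constants (again independent of StatsBase; `p` and `q` are illustrative names):

```julia
p = [0.2, 0.3, 0.5]
q = [0.3, 0.4, 0.3]

# cross entropy: -∑ pᵢ log(qᵢ), optionally in base 2
-sum(pᵢ * log(qᵢ) for (pᵢ, qᵢ) in zip(p, q))      # ≈ 1.1176681825904018
-sum(pᵢ * log(2, qᵢ) for (pᵢ, qᵢ) in zip(p, q))   # ≈ 1.6124543443825532

# KL divergence: ∑ pᵢ log(pᵢ / qᵢ) = crossentropy(p, q) - entropy(p)
sum(pᵢ * log(pᵢ / qᵢ) for (pᵢ, qᵢ) in zip(p, q))  # ≈ 0.08801516852582819
# equivalently: 1.1176681825904018 - 1.0296530140645737
```

The base-2 KL value is the natural-log value rescaled by log(2): 0.08801516852582819 / log(2) ≈ 0.12697904715521868. As with entropy, the zero-probability case `kldivergence([0, 1], [0f0, 1f0])` relies on treating 0 · log(0/0) as 0.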