@@ -17,42 +17,34 @@ env1 = map(e -> [e[1]], env) # just the first row
 end

 @testset "Maxnet" begin
-    # test both backends work
-    model_glmnet = Maxnet.maxnet((p_a), env; features = "lq", backend = GLMNetBackend());
-    model_lasso = Maxnet.maxnet((p_a), env; features = "lq", backend = LassoBackend());
-
-    # test both backends come up with approximately the same result
-    @test all(isapprox.(model_glmnet.coefs, model_lasso.coefs; rtol = 0.1, atol = 0.1))
-    @test Statistics.cor(model_glmnet.coefs, model_lasso.coefs) > 0.99
+    # some class combinations and keywords
+    m = Maxnet.maxnet(p_a, env; features = "lq");
+    Maxnet.maxnet(p_a, env; features = "lqp", regularization_multiplier = 2.);
+    Maxnet.maxnet(p_a, env; features = "lqh", regularization_multiplier = 5., nknots = 10);
+    Maxnet.maxnet(p_a, env; features = "lqph", weight_factor = 10.);

     # test the result
-    @test model_glmnet.entropy ≈ 6.114650341746531
-    @test complexity(model_glmnet) == 21
-
-    # some class combinations and keywords
-    Maxnet.maxnet(p_a, env; features = "lq", backend = LassoBackend());
-    Maxnet.maxnet(p_a, env; features = "lqp", regularization_multiplier = 2., backend = LassoBackend());
-    Maxnet.maxnet(p_a, env; features = "lqh", regularization_multiplier = 5., nknots = 10, backend = LassoBackend());
-    Maxnet.maxnet(p_a, env; features = "lqph", weight_factor = 10., backend = LassoBackend());
+    @test m.entropy ≈ 6.114650341746531
+    @test complexity(m) == 21

     # predictions
-    prediction = Maxnet.predict(model_lasso, env)
+    prediction = Maxnet.predict(m, env)
     @test Statistics.mean(prediction[p_a]) > Statistics.mean(prediction[.~p_a])
     @test minimum(prediction) > 0.
     @test maximum(prediction) < 1.
-    @test mean(prediction) ≈ 0.243406167194403 atol = 1e-4
+    @test mean(prediction) ≈ 0.24375837576014572 atol = 1e-4

     # check that clamping works
     # clamp shouldn't change anything in this case
-    @test prediction == Maxnet.predict(model_lasso, env; clamp = true)
+    @test prediction == Maxnet.predict(m, env; clamp = true)

     # predict with a crazy extrapolation
     env1_extrapolated = merge(env1, (;cld6190_ann = [100_000]))
     env1_max_cld = merge(env1, (;cld6190_ann = [maximum(env.cld6190_ann)]))

     # using clamp the prediction uses the highest cloud
-    @test Maxnet.predict(model_lasso, env1_extrapolated; link = IdentityLink(), clamp = true) ==
-        Maxnet.predict(model_lasso, env1_max_cld; link = IdentityLink())
+    @test Maxnet.predict(m, env1_extrapolated; link = IdentityLink(), clamp = true) ==
+        Maxnet.predict(m, env1_max_cld; link = IdentityLink())
 end

 @testset "MLJ" begin
@@ -63,21 +55,22 @@
     env_typed = MLJBase.coerce(env, cont_keys...)

     # make a machine
-    mach1 = machine(mn(features = "lq", backend = LassoBackend()), env_typed, categorical(p_a))
+    mach1 = machine(mn(features = "lq"), env_typed, categorical(p_a))
     fit!(mach1)

-    mach2 = machine(mn(features = "lqph", backend = GLMNetBackend()), env_typed, categorical(p_a))
+    mach2 = machine(mn(features = "lqph"), env_typed, categorical(p_a))
     fit!(mach2)

     # make the equivalent model without mlj
-    model = Maxnet.maxnet((p_a), env_typed; features = "lqph", backend = GLMNetBackend());
+    model = Maxnet.maxnet((p_a), env_typed; features = "lqph");


     # predict via MLJBase
     mljprediction = MLJBase.predict(mach2, env_typed)
     mlj_true_probability = pdf.(mljprediction, true)

     # test that this predicts the same as the equivalent model without mlj
+
     @test all(Maxnet.predict(model, env_typed) .≈ mlj_true_probability)

     @test Statistics.mean(mlj_true_probability[p_a]) > Statistics.mean(mlj_true_probability[.~p_a])
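
For reference, a minimal sketch of how the simplified API exercised by these tests might be called outside the test suite, now that the backend keyword is gone. It assumes the package's bundled `bradypus()` demo loader returns a presence/absence vector and predictor table like the `p_a` and `env` fixtures used above; treat it as an illustration under those assumptions, not part of this change.

    # Illustrative only; `Maxnet.bradypus()` is assumed to return the example
    # presence/absence vector and predictor table used in the tests above.
    using Maxnet, Statistics

    p_a, env = Maxnet.bradypus()

    # fit with linear and quadratic features (no backend keyword needed)
    m = Maxnet.maxnet(p_a, env; features = "lq")

    # predict occurrence probabilities, clamping predictors to the training range
    prediction = Maxnet.predict(m, env; clamp = true)
    Statistics.mean(prediction)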