diff --git a/.github/codecov.yml b/.github/codecov.yml
index c62bedf..4718e32 100644
--- a/.github/codecov.yml
+++ b/.github/codecov.yml
@@ -1,5 +1,6 @@
 coverage:
-  status:
+  ignore:
+    - "src/learners/gradient_descent.jl"
     project:
       default:
         threshold: 0.5%
diff --git a/test/learners/dimension_reduction.jl b/test/learners/dimension_reduction.jl
index 5d73272..ff2eae7 100644
--- a/test/learners/dimension_reduction.jl
+++ b/test/learners/dimension_reduction.jl
@@ -11,7 +11,7 @@ U, Vt = r.U, r.Vt
 X = U*diagm([1, 2, 3, 0.01, 0.01])*Vt
 
 learner = LearnTestAPI.TruncatedSVD(codim=2)
-@testapi learner X verbosity=0
+@testapi learner X verbosity=1
 
 @testset "extra test for truncated SVD" begin
     model = @test_logs(
diff --git a/test/learners/ensembling.jl b/test/learners/ensembling.jl
index 1a1baa1..2464425 100644
--- a/test/learners/ensembling.jl
+++ b/test/learners/ensembling.jl
@@ -25,7 +25,7 @@ Xtest = Tables.subset(X, test)
 rng = StableRNG(123)
 atom = LearnTestAPI.Ridge()
 learner = LearnTestAPI.Ensemble(atom; n=4, rng)
-@testapi learner data verbosity=0
+@testapi learner data verbosity=1
 
 @testset "extra tests for ensemble" begin
     @test LearnAPI.clone(learner) == learner
diff --git a/test/learners/regression.jl b/test/learners/regression.jl
index ecaaef3..ae33e63 100644
--- a/test/learners/regression.jl
+++ b/test/learners/regression.jl
@@ -20,7 +20,7 @@ data = (X, y)
 # # RIDGE
 
 learner = LearnTestAPI.Ridge(lambda=0.5)
-@testapi learner data verbosity=0
+@testapi learner data verbosity=1
 
 @testset "extra tests for ridge regression" begin
     @test :(LearnAPI.obs) in LearnAPI.functions(learner)
@@ -82,7 +82,7 @@ end
 # # BABY RIDGE
 
 learner = LearnTestAPI.BabyRidge(lambda=0.5)
-@testapi learner data verbosity=0
+@testapi learner data verbosity=1
 
 @testset "extra tests for baby ridge" begin
     model = fit(learner, Tables.subset(X, train), y[train]; verbosity=0)
diff --git a/test/learners/static_algorithms.jl b/test/learners/static_algorithms.jl
index eeae3b6..26e21c5 100644
--- a/test/learners/static_algorithms.jl
+++ b/test/learners/static_algorithms.jl
@@ -9,7 +9,7 @@ import DataFrames
 
 learner = LearnTestAPI.Selector(names=[:x, :w])
 X = DataFrames.DataFrame(rand(3, 4), [:x, :y, :z, :w])
-@testapi learner X verbosity=0
+@testapi learner X verbosity=1
 
 @testset "test a static transformer" begin
     model = fit(learner) # no data arguments!
@@ -23,7 +23,7 @@ end
 
 learner = LearnTestAPI.FancySelector(names=[:x, :w])
 X = DataFrames.DataFrame(rand(3, 4), [:x, :y, :z, :w])
-@testapi learner X verbosity=0
+@testapi learner X verbosity=1
 
 @testset "test a variation that reports byproducts" begin
     model = fit(learner) # no data arguments!