From c462649d78d7f6289004794968ad252cc72138e6 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Mon, 7 Apr 2025 23:13:18 +0000 Subject: [PATCH 1/6] introduce AbstractPopMember to extend PopMember This commit introduces a new abstract type (`AbstractPopMember`) for Population Members. This allows us to track additional information for each PopMember item. For instance, we may want to track the type of mutation that contributes most to each PopMember's performance. --- src/ConstantOptimization.jl | 6 ++--- src/ExpressionBuilder.jl | 4 ++-- src/HallOfFame.jl | 10 +++++--- src/Migration.jl | 4 ++-- src/Mutate.jl | 46 ++++++++++++++++++------------------- src/PopMember.jl | 22 ++++++++++-------- src/Population.jl | 19 +++++++++------ src/SymbolicRegression.jl | 3 ++- src/TemplateExpression.jl | 4 ++-- 9 files changed, 65 insertions(+), 53 deletions(-) diff --git a/src/ConstantOptimization.jl b/src/ConstantOptimization.jl index deab6387a..c4f4365f8 100644 --- a/src/ConstantOptimization.jl +++ b/src/ConstantOptimization.jl @@ -15,11 +15,11 @@ using ..CoreModule: AbstractOptions, Dataset, DATA_TYPE, LOSS_TYPE, specialized_options, dataset_fraction using ..UtilsModule: get_birth_order using ..LossFunctionsModule: eval_loss, loss_to_cost -using ..PopMemberModule: PopMember +using ..PopMemberModule: AbstractPopMember function optimize_constants( dataset::Dataset{T,L}, member::P, options::AbstractOptions -)::Tuple{P,Float64} where {T<:DATA_TYPE,L<:LOSS_TYPE,P<:PopMember{T,L}} +)::Tuple{P,Float64} where {T<:DATA_TYPE,L<:LOSS_TYPE,P<:AbstractPopMember{T,L}} nconst = count_constants_for_optimization(member.tree) nconst == 0 && return (member, 0.0) if nconst == 1 && !(T <: Complex) @@ -48,7 +48,7 @@ count_constants_for_optimization(ex::Expression) = count_scalar_constants(ex) function _optimize_constants( dataset, member::P, options, algorithm, optimizer_options -)::Tuple{P,Float64} where {T,L,P<:PopMember{T,L}} +)::Tuple{P,Float64} where {T,L,P<:AbstractPopMember{T,L}} tree = member.tree
eval_fraction = dataset_fraction(dataset) x0, refs = get_scalar_constants(tree) diff --git a/src/ExpressionBuilder.jl b/src/ExpressionBuilder.jl index db6f5e82b..a9c10152b 100644 --- a/src/ExpressionBuilder.jl +++ b/src/ExpressionBuilder.jl @@ -11,7 +11,7 @@ using DynamicExpressions: using ..CoreModule: AbstractOptions, Dataset using ..HallOfFameModule: HallOfFame using ..PopulationModule: Population -using ..PopMemberModule: PopMember +using ..PopMemberModule: PopMember, AbstractPopMember import DynamicExpressions: get_operators import ..CoreModule: create_expression @@ -135,7 +135,7 @@ end end function embed_metadata( vec::Vector{H}, options::AbstractOptions, dataset::Dataset{T,L} - ) where {T,L,H<:Union{HallOfFame,Population,PopMember}} + ) where {T,L,PM<:AbstractPopMember,H<:Union{HallOfFame,Population,PM}} return map(Fix{2}(Fix{3}(embed_metadata, dataset), options), vec) end end diff --git a/src/HallOfFame.jl b/src/HallOfFame.jl index 7d870f78b..d3e075898 100644 --- a/src/HallOfFame.jl +++ b/src/HallOfFame.jl @@ -5,7 +5,7 @@ using DynamicExpressions: AbstractExpression, string_tree using ..UtilsModule: split_string, AnnotatedIOBuffer, dump_buffer using ..CoreModule: AbstractOptions, Dataset, DATA_TYPE, LOSS_TYPE, relu, create_expression using ..ComplexityModule: compute_complexity -using ..PopMemberModule: PopMember +using ..PopMemberModule: AbstractPopMember, PopMember using ..InterfaceDynamicExpressionsModule: format_dimensions, WILDCARD_UNIT_STRING using Printf: @sprintf @@ -23,7 +23,7 @@ have been set, you can run `.members[exists]`. - `exists::Array{Bool,1}`: Whether the member at the given complexity has been set. 
""" struct HallOfFame{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T}} - members::Array{PopMember{T,L,N},1} + members::Array{<:AbstractPopMember{T,L,N},1} exists::Array{Bool,1} #Whether it has been set end function Base.show(io::IO, mime::MIME"text/plain", hof::HallOfFame{T,L,N}) where {T,L,N} @@ -91,7 +91,11 @@ end """ function calculate_pareto_frontier(hallOfFame::HallOfFame{T,L,N}) where {T,L,N} # TODO - remove dataset from args. - P = PopMember{T,L,N} + P = if length(hallOfFame.members) > 0 + typeof(hallOfFame.members[1]) + else + PopMember{T,L,N} + end # Dominating pareto curve - must be better than all simpler equations dominating = P[] for size in eachindex(hallOfFame.members) diff --git a/src/Migration.jl b/src/Migration.jl index f7fe61b89..d2446954f 100644 --- a/src/Migration.jl +++ b/src/Migration.jl @@ -2,7 +2,7 @@ module MigrationModule using ..CoreModule: AbstractOptions using ..PopulationModule: Population -using ..PopMemberModule: PopMember, reset_birth! +using ..PopMemberModule: AbstractPopMember, reset_birth! using ..UtilsModule: poisson_sample """ @@ -14,7 +14,7 @@ to do so. The original migrant population is not modified. 
Pass with, e.g., """ function migrate!( migration::Pair{Vector{PM},P}, options::AbstractOptions; frac::AbstractFloat -) where {T,L,N,PM<:PopMember{T,L,N},P<:Population{T,L,N}} +) where {T,L,N,PM<:AbstractPopMember{T,L,N},P<:Population{T,L,N}} base_pop = migration.second population_size = length(base_pop.members) mean_number_replaced = population_size * frac diff --git a/src/Mutate.jl b/src/Mutate.jl index f1e935dd3..8c81339f3 100644 --- a/src/Mutate.jl +++ b/src/Mutate.jl @@ -22,7 +22,7 @@ using ..ComplexityModule: compute_complexity using ..LossFunctionsModule: eval_cost using ..CheckConstraintsModule: check_constraints using ..AdaptiveParsimonyModule: RunningSearchStatistics -using ..PopMemberModule: PopMember +using ..PopMemberModule: AbstractPopMember, PopMember using ..MutationFunctionsModule: mutate_constant, mutate_operator, @@ -39,10 +39,10 @@ using ..MutationFunctionsModule: using ..ConstantOptimizationModule: optimize_constants using ..RecorderModule: @recorder -abstract type AbstractMutationResult{N<:AbstractExpression,P<:PopMember} end +abstract type AbstractMutationResult{N<:AbstractExpression,P<:AbstractPopMember} end """ - MutationResult{N<:AbstractExpression,P<:PopMember} + MutationResult{N<:AbstractExpression,P<:AbstractPopMember} Represents the result of a mutation operation in the genetic programming algorithm. This struct is used to return values from `mutate!` functions. @@ -60,7 +60,7 @@ This struct encapsulates the result of a mutation operation. Either a new expres Return the `member` if you want to return immediately, and have computed the loss value as part of the mutation. 
""" -struct MutationResult{N<:AbstractExpression,P<:PopMember} <: AbstractMutationResult{N,P} +struct MutationResult{N<:AbstractExpression,P<:AbstractPopMember} <: AbstractMutationResult{N,P} tree::Union{N,Nothing} member::Union{P,Nothing} num_evals::Float64 @@ -72,7 +72,7 @@ struct MutationResult{N<:AbstractExpression,P<:PopMember} <: AbstractMutationRes member::Union{_P,Nothing}=nothing, num_evals::Float64=0.0, return_immediately::Bool=false, - ) where {_N<:AbstractExpression,_P<:PopMember} + ) where {_N<:AbstractExpression,_P<:AbstractPopMember} @assert( (tree === nothing) ⊻ (member === nothing), "Mutation result must return either a tree or a pop member, not both" @@ -98,7 +98,7 @@ Note that the weights were already copied, so you don't need to worry about muta """ function condition_mutation_weights!( weights::AbstractMutationWeights, member::P, options::AbstractOptions, curmaxsize::Int -) where {T,L,N<:AbstractExpression,P<:PopMember{T,L,N}} +) where {T,L,N<:AbstractExpression,P<:AbstractPopMember{T,L,N}} tree = get_tree(member.tree) if !preserve_sharing(typeof(member.tree)) weights.form_connection = 0.0 @@ -168,7 +168,7 @@ end tmp_recorder::RecordType, )::Tuple{ P,Bool,Float64 -} where {T,L,D<:Dataset{T,L},N<:AbstractExpression{T},P<:PopMember{T,L,N}} +} where {T,L,D<:Dataset{T,L},N<:AbstractExpression{T},P<:AbstractPopMember{T,L,N}} parent_ref = member.ref num_evals = 0.0 @@ -372,7 +372,7 @@ end mutation_weights::AbstractMutationWeights, options::AbstractOptions; kws..., - ) where {N<:AbstractExpression,P<:PopMember,S} + ) where {N<:AbstractExpression,P<:AbstractPopMember,S} Perform a mutation on the given `tree` and `member` using the specified mutation type `S`. Various `kws` are provided to access other data needed for some mutations. @@ -400,7 +400,7 @@ so it can always return immediately. """ function mutate!( ::N, ::P, ::Val{S}, ::AbstractMutationWeights, ::AbstractOptions; kws... 
-) where {N<:AbstractExpression,P<:PopMember,S} +) where {N<:AbstractExpression,P<:AbstractPopMember,S} return error("Unknown mutation choice: $S") end @@ -413,7 +413,7 @@ function mutate!( recorder::RecordType, temperature, kws..., -) where {N<:AbstractExpression,P<:PopMember} +) where {N<:AbstractExpression,P<:AbstractPopMember} tree = mutate_constant(tree, temperature, options) @recorder recorder["type"] = "mutate_constant" return MutationResult{N,P}(; tree=tree) @@ -427,7 +427,7 @@ function mutate!( options::AbstractOptions; recorder::RecordType, kws..., -) where {N<:AbstractExpression,P<:PopMember} +) where {N<:AbstractExpression,P<:AbstractPopMember} tree = mutate_operator(tree, options) @recorder recorder["type"] = "mutate_operator" return MutationResult{N,P}(; tree=tree) @@ -441,7 +441,7 @@ function mutate!( options::AbstractOptions; recorder::RecordType, kws..., -) where {N<:AbstractExpression,P<:PopMember} +) where {N<:AbstractExpression,P<:AbstractPopMember} tree = swap_operands(tree) @recorder recorder["type"] = "swap_operands" return MutationResult{N,P}(; tree=tree) @@ -456,7 +456,7 @@ function mutate!( recorder::RecordType, nfeatures, kws..., -) where {N<:AbstractExpression,P<:PopMember} +) where {N<:AbstractExpression,P<:AbstractPopMember} if rand() < 0.5 tree = append_random_op(tree, options, nfeatures) @recorder recorder["type"] = "add_node:append" @@ -476,7 +476,7 @@ function mutate!( recorder::RecordType, nfeatures, kws..., -) where {N<:AbstractExpression,P<:PopMember} +) where {N<:AbstractExpression,P<:AbstractPopMember} tree = insert_random_op(tree, options, nfeatures) @recorder recorder["type"] = "insert_node" return MutationResult{N,P}(; tree=tree) @@ -491,7 +491,7 @@ function mutate!( recorder::RecordType, nfeatures, kws..., -) where {N<:AbstractExpression,P<:PopMember} +) where {N<:AbstractExpression,P<:AbstractPopMember} tree = delete_random_op!(tree, options, nfeatures) @recorder recorder["type"] = "delete_node" return 
MutationResult{N,P}(; tree=tree) @@ -505,7 +505,7 @@ function mutate!( options::AbstractOptions; recorder::RecordType, kws..., -) where {N<:AbstractExpression,P<:PopMember} +) where {N<:AbstractExpression,P<:AbstractPopMember} tree = form_random_connection!(tree) @recorder recorder["type"] = "form_connection" return MutationResult{N,P}(; tree=tree) @@ -519,7 +519,7 @@ function mutate!( options::AbstractOptions; recorder::RecordType, kws..., -) where {N<:AbstractExpression,P<:PopMember} +) where {N<:AbstractExpression,P<:AbstractPopMember} tree = break_random_connection!(tree) @recorder recorder["type"] = "break_connection" return MutationResult{N,P}(; tree=tree) @@ -533,7 +533,7 @@ function mutate!( options::AbstractOptions; recorder::RecordType, kws..., -) where {N<:AbstractExpression,P<:PopMember} +) where {N<:AbstractExpression,P<:AbstractPopMember} tree = randomly_rotate_tree!(tree) @recorder recorder["type"] = "rotate_tree" return MutationResult{N,P}(; tree=tree) @@ -549,7 +549,7 @@ function mutate!( recorder::RecordType, parent_ref, kws..., -) where {N<:AbstractExpression,P<:PopMember} +) where {N<:AbstractExpression,P<:AbstractPopMember} @assert options.should_simplify simplify_tree!(tree, options.operators) tree = combine_operators(tree, options.operators) @@ -577,7 +577,7 @@ function mutate!( curmaxsize, nfeatures, kws..., -) where {T,N<:AbstractExpression{T},P<:PopMember} +) where {T,N<:AbstractExpression{T},P<:AbstractPopMember} tree = randomize_tree(tree, curmaxsize, options, nfeatures) @recorder recorder["type"] = "randomize" return MutationResult{N,P}(; tree=tree) @@ -592,7 +592,7 @@ function mutate!( recorder::RecordType, dataset::Dataset, kws..., -) where {N<:AbstractExpression,P<:PopMember} +) where {N<:AbstractExpression,P<:AbstractPopMember} cur_member, new_num_evals = optimize_constants(dataset, member, options) @recorder recorder["type"] = "optimize" return MutationResult{N,P}(; @@ -609,7 +609,7 @@ function mutate!( recorder::RecordType, 
parent_ref, kws..., -) where {N<:AbstractExpression,P<:PopMember} +) where {N<:AbstractExpression,P<:AbstractPopMember} @recorder begin recorder["type"] = "identity" recorder["result"] = "accept" @@ -637,7 +637,7 @@ function crossover_generation( curmaxsize::Int, options::AbstractOptions; recorder::RecordType=RecordType(), -)::Tuple{P,P,Bool,Float64} where {T,L,D<:Dataset{T,L},N,P<:PopMember{T,L,N}} +)::Tuple{P,P,Bool,Float64} where {T,L,D<:Dataset{T,L},N,P<:AbstractPopMember{T,L,N}} tree1 = member1.tree tree2 = member2.tree crossover_accepted = false diff --git a/src/PopMember.jl b/src/PopMember.jl index bd195a6c2..ec94cc15d 100644 --- a/src/PopMember.jl +++ b/src/PopMember.jl @@ -7,8 +7,10 @@ import ..ComplexityModule: compute_complexity using ..UtilsModule: get_birth_order using ..LossFunctionsModule: eval_cost +abstract type AbstractPopMember{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T}} end + # Define a member of population by equation, cost, and age -mutable struct PopMember{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T}} +mutable struct PopMember{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T}} <: AbstractPopMember{T,L,N} tree::N cost::L # Inludes complexity penalty, normalization loss::L # Raw loss @@ -19,7 +21,7 @@ mutable struct PopMember{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T}} ref::Int parent::Int end -@inline function Base.setproperty!(member::PopMember, field::Symbol, value) +@inline function Base.setproperty!(member::M, field::Symbol, value) where {M<:AbstractPopMember} if field == :complexity throw( error("Don't set `.complexity` directly. Use `recompute_complexity!` instead.") @@ -34,7 +36,7 @@ end end return setfield!(member, field, value) end -@unstable @inline function Base.getproperty(member::PopMember, field::Symbol) +@unstable @inline function Base.getproperty(member::M, field::Symbol) where {M<:AbstractPopMember} if field == :complexity throw( error("Don't access `.complexity` directly. 
Use `compute_complexity` instead.") @@ -47,7 +49,7 @@ end end return getfield(member, field) end -function Base.show(io::IO, p::PopMember{T,L,N}) where {T,L,N} +function Base.show(io::IO, p::M) where {T,L,N,M<:PopMember{T,L,N}} shower(x) = sprint(show, x) print(io, "PopMember(") print(io, "tree = (", string_tree(p.tree), "), ") @@ -145,7 +147,7 @@ function PopMember( ) end -function Base.copy(p::P) where {P<:PopMember} +function Base.copy(p::P) where {P<:AbstractPopMember} tree = copy(p.tree) cost = copy(p.cost) loss = copy(p.loss) @@ -156,23 +158,23 @@ function Base.copy(p::P) where {P<:PopMember} return P(tree, cost, loss, birth, complexity, ref, parent) end -function reset_birth!(p::PopMember; deterministic::Bool) +function reset_birth!(p::M; deterministic::Bool) where {M<:AbstractPopMember} p.birth = get_birth_order(; deterministic) return p end # Can read off complexity directly from pop members function compute_complexity( - member::PopMember, options::AbstractOptions; break_sharing=Val(false) -)::Int + member::M, options::AbstractOptions; break_sharing=Val(false) +)::Int where {M<:AbstractPopMember} complexity = getfield(member, :complexity) complexity == -1 && return recompute_complexity!(member, options; break_sharing) # TODO: Turn this into a warning, and then return normal compute_complexity instead. return complexity end function recompute_complexity!( - member::PopMember, options::AbstractOptions; break_sharing=Val(false) -)::Int + member::M, options::AbstractOptions; break_sharing=Val(false) +)::Int where {M<:AbstractPopMember} complexity = compute_complexity(member.tree, options; break_sharing) setfield!(member, :complexity, complexity) return complexity diff --git a/src/Population.jl b/src/Population.jl index 739ca828e..e9d2986ac 100644 --- a/src/Population.jl +++ b/src/Population.jl @@ -8,12 +8,12 @@ using ..ComplexityModule: compute_complexity using ..LossFunctionsModule: eval_cost, update_baseline_loss! 
using ..AdaptiveParsimonyModule: RunningSearchStatistics using ..MutationFunctionsModule: gen_random_tree -using ..PopMemberModule: PopMember +using ..PopMemberModule: PopMember, AbstractPopMember using ..UtilsModule: bottomk_fast, argmin_fast, PerTaskCache # A list of members of the population, with easy constructors, # which allow for random generation of new populations -struct Population{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T}} - members::Array{PopMember{T,L,N},1} +struct Population{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T},PM<:AbstractPopMember{T,L,N}} + members::Array{PM,1} n::Int end """ @@ -21,7 +21,7 @@ end Create population from list of PopMembers. """ -function Population(pop::Vector{<:PopMember}) +function Population(pop::Vector{<:AbstractPopMember{T,L}}) where {T<:DATA_TYPE,L<:LOSS_TYPE} return Population(pop, size(pop, 1)) end @@ -91,7 +91,12 @@ Create random population and score them on the dataset. end function Base.copy(pop::P)::P where {T,L,N,P<:Population{T,L,N}} - copied_members = Vector{PopMember{T,L,N}}(undef, pop.n) + PM = if length(pop.members) > 0 + typeof(pop.members[1]) + else + AbstractPopMember{T,L,N} + end + copied_members = Vector{PM}(undef, pop.n) Threads.@threads for i in 1:(pop.n) copied_members[i] = copy(pop.members[i]) end @@ -118,7 +123,7 @@ function _best_of_sample( members::Vector{P}, running_search_statistics::RunningSearchStatistics, options::AbstractOptions, -) where {T,L,P<:PopMember{T,L}} +) where {T,L,P<:AbstractPopMember{T,L}} p = options.tournament_selection_p n = length(members) # == tournament_selection_n adjusted_costs = Vector{L}(undef, n) @@ -157,7 +162,7 @@ function _best_of_sample( end return members[chosen_idx] end -_get_cost(member::PopMember) = member.cost +_get_cost(member::AbstractPopMember) = member.cost const CACHED_WEIGHTS = let init_k = collect(0:5), diff --git a/src/SymbolicRegression.jl b/src/SymbolicRegression.jl index 5fcf57f63..77fb0e2ba 100644 --- a/src/SymbolicRegression.jl 
+++ b/src/SymbolicRegression.jl @@ -2,6 +2,7 @@ module SymbolicRegression # Types export Population, + AbstractPopMember, PopMember, HallOfFame, Options, @@ -295,7 +296,7 @@ using .MutationFunctionsModule: crossover_trees using .InterfaceDynamicExpressionsModule: @extend_operators using .LossFunctionsModule: eval_loss, eval_cost, update_baseline_loss!, score_func -using .PopMemberModule: PopMember, reset_birth! +using .PopMemberModule: AbstractPopMember, PopMember, reset_birth! using .PopulationModule: Population, best_sub_pop, record_population, best_of_sample using .HallOfFameModule: HallOfFame, calculate_pareto_frontier, string_dominating_pareto_curve diff --git a/src/TemplateExpression.jl b/src/TemplateExpression.jl index cc38904f7..4cdcb696a 100644 --- a/src/TemplateExpression.jl +++ b/src/TemplateExpression.jl @@ -52,7 +52,7 @@ using ..CheckConstraintsModule: CheckConstraintsModule as CC using ..ComplexityModule: ComplexityModule using ..LossFunctionsModule: LossFunctionsModule as LF using ..MutateModule: MutateModule as MM -using ..PopMemberModule: PopMember +using ..PopMemberModule: PopMember, AbstractPopMember using ..ComposableExpressionModule: ComposableExpression, ValidVector struct ParamVector{T} <: AbstractVector{T} @@ -692,7 +692,7 @@ function MM.condition_mutation_weights!( @nospecialize(member::P), @nospecialize(options::AbstractOptions), curmaxsize::Int, -) where {T,L,N<:TemplateExpression,P<:PopMember{T,L,N}} +) where {T,L,N<:TemplateExpression,P<:AbstractPopMember{T,L,N}} if !preserve_sharing(typeof(member.tree)) weights.form_connection = 0.0 weights.break_connection = 0.0 From d2db18373c2de9bdd6789fdb5b3f8ab31aa6d68d Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Tue, 8 Apr 2025 08:23:16 +0000 Subject: [PATCH 2/6] hacky working. 
UNDO COMMIT --- src/Mutate.jl | 8 ++++---- src/ParametricExpression.jl | 6 +++--- src/PopMember.jl | 1 + src/Population.jl | 28 +++++++++++++++------------- src/SearchUtils.jl | 4 ++-- src/TemplateExpression.jl | 4 ++-- 6 files changed, 27 insertions(+), 24 deletions(-) diff --git a/src/Mutate.jl b/src/Mutate.jl index 8c81339f3..829d2704d 100644 --- a/src/Mutate.jl +++ b/src/Mutate.jl @@ -146,10 +146,10 @@ Use this to modify how `mutate_constant` changes for an expression type. function condition_mutate_constant!( ::Type{<:AbstractExpression}, weights::AbstractMutationWeights, - member::PopMember, + member::PM, options::AbstractOptions, curmaxsize::Int, -) +) where {PM<:AbstractPopMember} n_constants = count_scalar_constants(member.tree) weights.mutate_constant *= min(8, n_constants) / 8.0 @@ -343,12 +343,12 @@ end @generated function _dispatch_mutations!( tree::AbstractExpression, - member::PopMember, + member::PM, mutation_choice::Symbol, weights::W, options::AbstractOptions; kws..., -) where {W<:AbstractMutationWeights} +) where {W<:AbstractMutationWeights, PM<:AbstractPopMember} mutation_choices = fieldnames(W) quote Base.Cartesian.@nif( diff --git a/src/ParametricExpression.jl b/src/ParametricExpression.jl index cf8eff752..6b5f4e61e 100644 --- a/src/ParametricExpression.jl +++ b/src/ParametricExpression.jl @@ -24,7 +24,7 @@ using ..CoreModule: AbstractExpressionSpec, get_indices, ExpressionSpecModule as ES -using ..PopMemberModule: PopMember +using ..PopMemberModule: AbstractPopMember, PopMember using ..InterfaceDynamicExpressionsModule: InterfaceDynamicExpressionsModule as IDE using ..LossFunctionsModule: LossFunctionsModule as LF using ..ExpressionBuilderModule: ExpressionBuilderModule as EB @@ -102,10 +102,10 @@ end function MM.condition_mutate_constant!( ::Type{<:ParametricExpression}, weights::AbstractMutationWeights, - member::PopMember, + member::PM, options::AbstractOptions, curmaxsize::Int, -) +) where {PM<:AbstractPopMember} # Avoid modifying the 
mutate_constant weight, since # otherwise we would be mutating constants all the time! return nothing diff --git a/src/PopMember.jl b/src/PopMember.jl index ec94cc15d..1c6b143a1 100644 --- a/src/PopMember.jl +++ b/src/PopMember.jl @@ -180,4 +180,5 @@ function recompute_complexity!( return complexity end + end diff --git a/src/Population.jl b/src/Population.jl index e9d2986ac..a9abaf272 100644 --- a/src/Population.jl +++ b/src/Population.jl @@ -12,8 +12,8 @@ using ..PopMemberModule: PopMember, AbstractPopMember using ..UtilsModule: bottomk_fast, argmin_fast, PerTaskCache # A list of members of the population, with easy constructors, # which allow for random generation of new populations -struct Population{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T},PM<:AbstractPopMember{T,L,N}} - members::Array{PM,1} +struct Population{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T}} + members::Array{<:AbstractPopMember{T,L,N},1} n::Int end """ @@ -206,19 +206,21 @@ function best_sub_pop(pop::P; topn::Int=10)::P where {P<:Population} return Population(pop.members[best_idx[1:topn]]) end +function generate_record(member::PopMember, options::AbstractOptions)::RecordType + return RecordType( + "tree" => string_tree(member.tree, options; pretty=false), + "loss" => member.loss, + "cost" => member.cost, + "complexity" => compute_complexity(member, options), + "birth" => member.birth, + "ref" => member.ref, + "parent" => member.parent, + ) +end + function record_population(pop::Population, options::AbstractOptions)::RecordType return RecordType( - "population" => [ - RecordType( - "tree" => string_tree(member.tree, options; pretty=false), - "loss" => member.loss, - "cost" => member.cost, - "complexity" => compute_complexity(member, options), - "birth" => member.birth, - "ref" => member.ref, - "parent" => member.parent, - ) for member in pop.members - ], + "population" => [generate_record(member, options) for member in pop.members], "time" => time(), ) end diff --git 
a/src/SearchUtils.jl b/src/SearchUtils.jl index 266d0b97a..3d081a0ac 100644 --- a/src/SearchUtils.jl +++ b/src/SearchUtils.jl @@ -16,7 +16,7 @@ using ..UtilsModule: subscriptify using ..CoreModule: Dataset, AbstractOptions, Options, RecordType, max_features using ..ComplexityModule: compute_complexity using ..PopulationModule: Population -using ..PopMemberModule: PopMember +using ..PopMemberModule: AbstractPopMember, PopMember using ..HallOfFameModule: HallOfFame, string_dominating_pareto_curve using ..ProgressBarsModule: WrappedProgressBar, manually_iterate!, barlen using ..AdaptiveParsimonyModule: RunningSearchStatistics @@ -678,7 +678,7 @@ end function update_hall_of_fame!( hall_of_fame::HallOfFame, members::Vector{PM}, options::AbstractOptions -) where {PM<:PopMember} +) where {PM<:AbstractPopMember} for member in members size = compute_complexity(member, options) valid_size = 0 < size <= options.maxsize diff --git a/src/TemplateExpression.jl b/src/TemplateExpression.jl index 4cdcb696a..6d215c869 100644 --- a/src/TemplateExpression.jl +++ b/src/TemplateExpression.jl @@ -764,10 +764,10 @@ end function MM.condition_mutate_constant!( ::Type{<:TemplateExpression}, weights::AbstractMutationWeights, - member::PopMember, + member::PM, options::AbstractOptions, curmaxsize::Int, -) +) where {PM<:AbstractPopMember} # Avoid modifying the mutate_constant weight, since # otherwise we would be mutating constants all the time! return nothing From a32fa9f7b0403567060052d8add8569c67093616 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Mon, 21 Apr 2025 06:13:42 +0000 Subject: [PATCH 3/6] [Draft] Weakening some typing constraints to allow passing an AbstractPopMember around. Long-term fix seems to be to move information in the metadata field of expression instead. 
--- src/ExpressionBuilder.jl | 20 +++--- src/HallOfFame.jl | 148 +++++++++++++++++++++----------------- src/PopMember.jl | 2 +- src/SearchUtils.jl | 6 +- src/SymbolicRegression.jl | 24 ++++--- 5 files changed, 111 insertions(+), 89 deletions(-) diff --git a/src/ExpressionBuilder.jl b/src/ExpressionBuilder.jl index a9c10152b..4c1142eab 100644 --- a/src/ExpressionBuilder.jl +++ b/src/ExpressionBuilder.jl @@ -134,8 +134,10 @@ end ) end function embed_metadata( - vec::Vector{H}, options::AbstractOptions, dataset::Dataset{T,L} - ) where {T,L,PM<:AbstractPopMember,H<:Union{HallOfFame,Population,PM}} + vec::Vector{<:Union{HallOfFame,Population,AbstractPopMember}}, + options::AbstractOptions, + dataset::Dataset{T,L} + ) where {T,L} return map(Fix{2}(Fix{3}(embed_metadata, dataset), options), vec) end end @@ -153,8 +155,8 @@ function strip_metadata( return with_metadata(ex; init_params(options, dataset, ex, Val(false))...) end function strip_metadata( - member::PopMember, options::AbstractOptions, dataset::Dataset{T,L} -) where {T,L} + member::PM, options::AbstractOptions, dataset::Dataset{T,L} +) where {T,L,PM<:PopMember{T,L}} return PopMember( strip_metadata(member.tree, options, dataset), member.cost, @@ -165,14 +167,14 @@ function strip_metadata( deterministic=options.deterministic, ) end -function strip_metadata( - pop::Population, options::AbstractOptions, dataset::Dataset{T,L} -) where {T,L} +@unstable function strip_metadata( + pop::P, options::AbstractOptions, dataset::Dataset{T,L} +) where {T,L,P<:Population{T,L}} return Population(map(member -> strip_metadata(member, options, dataset), pop.members)) end function strip_metadata( - hof::HallOfFame, options::AbstractOptions, dataset::Dataset{T,L} -) where {T,L} + hof::H, options::AbstractOptions, dataset::Dataset{T,L} +) where {T,L,N,PM<:AbstractPopMember,H<:HallOfFame{T,L,N,PM}} return HallOfFame( map(member -> strip_metadata(member, options, dataset), hof.members), hof.exists ) diff --git a/src/HallOfFame.jl 
b/src/HallOfFame.jl index d3e075898..b26fadac1 100644 --- a/src/HallOfFame.jl +++ b/src/HallOfFame.jl @@ -1,6 +1,7 @@ module HallOfFameModule -using StyledStrings: @styled_str +using DispatchDoctor: @unstable +using StyledStrings: styled using DynamicExpressions: AbstractExpression, string_tree using ..UtilsModule: split_string, AnnotatedIOBuffer, dump_buffer using ..CoreModule: AbstractOptions, Dataset, DATA_TYPE, LOSS_TYPE, relu, create_expression @@ -22,8 +23,8 @@ have been set, you can run `.members[exists]`. These are ordered by complexity, with `.members[1]` the member with complexity 1. - `exists::Array{Bool,1}`: Whether the member at the given complexity has been set. """ -struct HallOfFame{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T}} - members::Array{<:AbstractPopMember{T,L,N},1} +struct HallOfFame{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T},PM<:AbstractPopMember{T,L,N}} + members::Array{PM,1} exists::Array{Bool,1} #Whether it has been set end function Base.show(io::IO, mime::MIME"text/plain", hof::HallOfFame{T,L,N}) where {T,L,N} @@ -65,7 +66,7 @@ function HallOfFame( ) where {T<:DATA_TYPE,L<:LOSS_TYPE} base_tree = create_expression(zero(T), options, dataset) - return HallOfFame{T,L,typeof(base_tree)}( + return HallOfFame{T,L,typeof(base_tree), PopMember{T,L,typeof(base_tree)}}( [ PopMember( copy(base_tree), @@ -89,15 +90,10 @@ end """ calculate_pareto_frontier(hallOfFame::HallOfFame{T,L,P}) where {T<:DATA_TYPE,L<:LOSS_TYPE} """ -function calculate_pareto_frontier(hallOfFame::HallOfFame{T,L,N}) where {T,L,N} +@unstable function calculate_pareto_frontier(hallOfFame::HallOfFame{T,L,N}) where {T,L,N} # TODO - remove dataset from args. 
- P = if length(hallOfFame.members) > 0 - typeof(hallOfFame.members[1]) - else - PopMember{T,L,N} - end # Dominating pareto curve - must be better than all simpler equations - dominating = P[] + dominating = similar(hallOfFame.members, 0) for size in eachindex(hallOfFame.members) if !hallOfFame.exists[size] continue @@ -123,49 +119,53 @@ function calculate_pareto_frontier(hallOfFame::HallOfFame{T,L,N}) where {T,L,N} return dominating end -const HEADER = let - join( - ( - rpad(styled"{bold:{underline:Complexity}}", 10), - rpad(styled"{bold:{underline:Loss}}", 9), - rpad(styled"{bold:{underline:Score}}", 9), - styled"{bold:{underline:Equation}}", - ), - " ", - ) -end +# const HEADER = let +# join( +# ( +# rpad(styled"{bold:{underline:Complexity}}", 10), +# rpad(styled"{bold:{underline:Loss}}", 9), +# rpad(styled"{bold:{underline:Score}}", 9), +# styled"{bold:{underline:Equation}}", +# ), +# " ", +# ) +# end + +_fmt(x::Integer) = @sprintf("%-10d", x) +_fmt(x::AbstractFloat) = @sprintf("%-8.3e", x) +_fmt(x) = rpad(string(x), 12) # fallback function string_dominating_pareto_curve( hallOfFame, dataset, options; width::Union{Integer,Nothing}=nothing, pretty::Bool=true ) terminal_width = (width === nothing) ? 
100 : max(100, width::Integer) + formatted = format_hall_of_fame(hallOfFame, options) + stat_cols = collect(propertynames(formatted)) + filter!(c -> c ≠ :trees, stat_cols) + priority = [:complexity, :loss, :score] + stat_cols = vcat(intersect(priority, stat_cols), + setdiff(stat_cols, priority)) + header_cells = [rpad(styled("{bold:{underline:$(titlecase(string(c)))}}"), 12) for c in stat_cols] + push!(header_cells, styled("{bold:{underline:Equation}}")) + header = join(header_cells, " ") + _buffer = IOBuffer() buffer = AnnotatedIOBuffer(_buffer) println(buffer, '─'^(terminal_width - 1)) - println(buffer, HEADER) - - formatted = format_hall_of_fame(hallOfFame, options) - for (tree, score, loss, complexity) in - zip(formatted.trees, formatted.scores, formatted.losses, formatted.complexities) - eqn_string = string_tree( - tree, - options; - display_variable_names=dataset.display_variable_names, - X_sym_units=dataset.X_sym_units, - y_sym_units=dataset.y_sym_units, - pretty, - ) - prefix = make_prefix(tree, options, dataset) - eqn_string = prefix * eqn_string - stats_columns_string = @sprintf("%-10d %-8.3e %-8.3e ", complexity, loss, score) - left_cols_width = length(stats_columns_string) - print(buffer, stats_columns_string) - print( - buffer, - wrap_equation_string( - eqn_string, left_cols_width + length(prefix), terminal_width - ), - ) + println(buffer, header) + for i in 1:length(formatted.trees) + stats = join((_fmt(getfield(formatted, c)[i]) for c in stat_cols), " ") + print(buffer, stats) + eqn = string_tree(formatted.trees[i], options; + display_variable_names = dataset.display_variable_names, + X_sym_units = dataset.X_sym_units, + y_sym_units = dataset.y_sym_units, + pretty) + prefix = make_prefix(formatted.trees[i], options, dataset) + print(buffer, + wrap_equation_string(prefix * eqn, + length(stats) + length(prefix) + 2, + terminal_width)) end print(buffer, '─'^(terminal_width - 1)) return dump_buffer(buffer) @@ -207,7 +207,9 @@ function 
wrap_equation_string(eqn_string, left_cols_width, terminal_width) return dump_buffer(buffer) end -function format_hall_of_fame(hof::HallOfFame{T,L}, options) where {T,L} +@unstable function format_hall_of_fame(hof::HallOfFame{T,L,N,PM}, options; + columns::Union{Vector{Symbol},Nothing}=[:losses, :complexities, :scores, :trees] + ) where {T,L,N,PM<:PopMember{T,L,N}} dominating = calculate_pareto_frontier(hof) foreach(dominating) do member if member.loss < 0.0 @@ -220,36 +222,52 @@ function format_hall_of_fame(hof::HallOfFame{T,L}, options) where {T,L} end end + member_fields = if length(dominating) == 0 + Union{}[] + else + collect(propertynames(first(dominating))) + end + filter!(f -> f != :tree && f != :loss, member_fields) + coldata = Dict{Symbol,Any}() + coldata[:trees] = [member.tree for member in dominating] + coldata[:losses] = [member.loss for member in dominating] + + for f in member_fields + coldata[f] = [getfield(m, f) for m in dominating] + end + coldata[:complexities] = [compute_complexity(m, options) for m in dominating] ZERO_POINT = eps(L) cur_loss = typemax(L) last_loss = cur_loss last_complexity = 0 - trees = [member.tree for member in dominating] - losses = [member.loss for member in dominating] - complexities = [compute_complexity(member, options) for member in dominating] - scores = Array{L}(undef, length(dominating)) - - for i in 1:length(dominating) - complexity = complexities[i] - cur_loss = losses[i] + coldata[:scores] = Vector{L}(undef, length(dominating)) + for i in eachindex(dominating) + complexity = coldata[:complexities][i] + cur_loss = coldata[:losses][i] delta_c = complexity - last_complexity delta_l_mse = log(relu(cur_loss / last_loss) + ZERO_POINT) - - scores[i] = relu(-delta_l_mse / delta_c) + coldata[:scores][i] = relu(-delta_l_mse / delta_c) last_loss = cur_loss last_complexity = complexity end - return (; trees, scores, losses, complexities) + # For coldata, only keep the columns that are in `columns` + if columns !== nothing + 
for c in keys(coldata) + if !(c in columns) + delete!(coldata, c) + end + end + end + return NamedTuple(coldata) end -function format_hall_of_fame(hof::AbstractVector{<:HallOfFame}, options) + +@unstable function format_hall_of_fame(hof::AbstractVector{<:HallOfFame}, options) outs = [format_hall_of_fame(h, options) for h in hof] - return (; - trees=[out.trees for out in outs], - scores=[out.scores for out in outs], - losses=[out.losses for out in outs], - complexities=[out.complexities for out in outs], - ) + isempty(outs) && return NamedTuple() + ks = propertynames(first(outs)) + vals = map(k -> [getfield(o, k) for o in outs], ks) + return NamedTuple{ks}(vals) end # TODO: Re-use this in `string_dominating_pareto_curve` diff --git a/src/PopMember.jl b/src/PopMember.jl index 1c6b143a1..b28074638 100644 --- a/src/PopMember.jl +++ b/src/PopMember.jl @@ -49,7 +49,7 @@ end end return getfield(member, field) end -function Base.show(io::IO, p::M) where {T,L,N,M<:PopMember{T,L,N}} +function Base.show(io::IO, p::PM) where {T,L,N,PM<:PopMember{T,L,N}} shower(x) = sprint(show, x) print(io, "PopMember(") print(io, "tree = (", string_tree(p.tree), "), ") diff --git a/src/SearchUtils.jl b/src/SearchUtils.jl index 3d081a0ac..696e596b3 100644 --- a/src/SearchUtils.jl +++ b/src/SearchUtils.jl @@ -244,9 +244,9 @@ function get_worker_output_type( end #! format: off -extract_from_worker(p::DefaultWorkerOutputType, _, _) = p -extract_from_worker(f::Future, ::Type{P}, ::Type{H}) where {P,H} = fetch(f)::DefaultWorkerOutputType{P,H} -extract_from_worker(t::Task, ::Type{P}, ::Type{H}) where {P,H} = fetch(t)::DefaultWorkerOutputType{P,H} +@unstable extract_from_worker(p::DefaultWorkerOutputType, _, _) = p +@unstable extract_from_worker(f::Future, ::Type{P}, ::Type{H}) where {P,H} = fetch(f)::DefaultWorkerOutputType{P,H} +@unstable extract_from_worker(t::Task, ::Type{P}, ::Type{H}) where {P,H} = fetch(t)::DefaultWorkerOutputType{P,H} #! format: on macro sr_spawner(expr, kws...) 
diff --git a/src/SymbolicRegression.jl b/src/SymbolicRegression.jl index 77fb0e2ba..fd51b430e 100644 --- a/src/SymbolicRegression.jl +++ b/src/SymbolicRegression.jl @@ -561,10 +561,10 @@ end datasets::Vector{D}, ropt::AbstractRuntimeOptions, options::AbstractOptions, saved_state ) where {D<:Dataset} _validate_options(datasets, ropt, options) - state = _create_workers(datasets, ropt, options) + state = _create_workers(PopMember, datasets, ropt, options) _initialize_search!(state, datasets, ropt, options, saved_state) - _warmup_search!(state, datasets, ropt, options) - _main_search_loop!(state, datasets, ropt, options) + _warmup_search!(PopMember, state, datasets, ropt, options) + _main_search_loop!(PopMember, state, datasets, ropt, options) _tear_down!(state, ropt, options) _info_dump(state, datasets, ropt, options) return _format_output(state, datasets, ropt, options) @@ -601,8 +601,8 @@ function _validate_options( return nothing end @stable default_mode = "disable" function _create_workers( - datasets::Vector{D}, ropt::AbstractRuntimeOptions, options::AbstractOptions -) where {T,L,D<:Dataset{T,L}} + ::Type{PM}, datasets::Vector{D}, ropt::AbstractRuntimeOptions, options::AbstractOptions +) where {T,L,D<:Dataset{T,L},PM<:AbstractPopMember} stdin_reader = watch_stream(options.input_stream) record = RecordType() @@ -613,7 +613,7 @@ end example_ex = create_expression(zero(T), options, example_dataset) NT = typeof(example_ex) PopType = Population{T,L,NT} - HallOfFameType = HallOfFame{T,L,NT} + HallOfFameType = HallOfFame{T,L,NT,PM{T,L,NT}} WorkerOutputType = get_worker_output_type( Val(ropt.parallelism), PopType, HallOfFameType ) @@ -768,11 +768,12 @@ function _initialize_search!( return nothing end function _warmup_search!( + ::Type{PM}, state::AbstractSearchState{T,L,N}, datasets, ropt::AbstractRuntimeOptions, options::AbstractOptions, -) where {T,L,N} +) where {T,L,N,PM<:AbstractPopMember} nout = length(datasets) for j in 1:nout, i in 1:(options.populations) dataset 
= datasets[j] @@ -790,7 +791,7 @@ function _warmup_search!( updated_pop = @sr_spawner( begin in_pop = first( - extract_from_worker(last_pop, Population{T,L,N}, HallOfFame{T,L,N}) + extract_from_worker(last_pop, Population{T,L,N}, HallOfFame{T,L,N,PM{T,L,N}}) ) _dispatch_s_r_cycle( in_pop, @@ -802,7 +803,7 @@ function _warmup_search!( ropt.verbosity, cur_maxsize, running_search_statistics=c_rss, - )::DefaultWorkerOutputType{Population{T,L,N},HallOfFame{T,L,N}} + )::DefaultWorkerOutputType{Population{T,L,N},HallOfFame{T,L,N,PM{T,L,N}}} end, parallelism = ropt.parallelism, worker_idx = worker_idx @@ -812,11 +813,12 @@ function _warmup_search!( return nothing end function _main_search_loop!( + ::Type{PM}, state::AbstractSearchState{T,L,N}, datasets, ropt::AbstractRuntimeOptions, options::AbstractOptions, -) where {T,L,N} +) where {T,L,N,PM<:AbstractPopMember} ropt.verbosity > 0 && @info "Started!" nout = length(datasets) start_time = time() @@ -892,7 +894,7 @@ function _main_search_loop!( ) else state.worker_output[j][i] - end::DefaultWorkerOutputType{Population{T,L,N},HallOfFame{T,L,N}} + end::DefaultWorkerOutputType{Population{T,L,N},HallOfFame{T,L,N,PM{T,L,N}}} state.last_pops[j][i] = copy(cur_pop) state.best_sub_pops[j][i] = best_sub_pop(cur_pop; topn=options.topn) @recorder state.record[] = recursive_merge(state.record[], cur_record) From bfc0df2f74b2b4db2fc010975d7e85cfac085da2 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Mon, 21 Apr 2025 06:40:24 +0000 Subject: [PATCH 4/6] format :/ --- src/ExpressionBuilder.jl | 2 +- src/HallOfFame.jl | 54 +++++++++++++++++++++++---------------- src/Mutate.jl | 5 ++-- src/PopMember.jl | 12 ++++++--- src/SymbolicRegression.jl | 4 ++- 5 files changed, 47 insertions(+), 30 deletions(-) diff --git a/src/ExpressionBuilder.jl b/src/ExpressionBuilder.jl index 4c1142eab..d4f926733 100644 --- a/src/ExpressionBuilder.jl +++ b/src/ExpressionBuilder.jl @@ -136,7 +136,7 @@ end function embed_metadata( 
vec::Vector{<:Union{HallOfFame,Population,AbstractPopMember}}, options::AbstractOptions, - dataset::Dataset{T,L} + dataset::Dataset{T,L}, ) where {T,L} return map(Fix{2}(Fix{3}(embed_metadata, dataset), options), vec) end diff --git a/src/HallOfFame.jl b/src/HallOfFame.jl index b26fadac1..26b78b2f1 100644 --- a/src/HallOfFame.jl +++ b/src/HallOfFame.jl @@ -23,7 +23,9 @@ have been set, you can run `.members[exists]`. These are ordered by complexity, with `.members[1]` the member with complexity 1. - `exists::Array{Bool,1}`: Whether the member at the given complexity has been set. """ -struct HallOfFame{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T},PM<:AbstractPopMember{T,L,N}} +struct HallOfFame{ + T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T},PM<:AbstractPopMember{T,L,N} +} members::Array{PM,1} exists::Array{Bool,1} #Whether it has been set end @@ -66,7 +68,7 @@ function HallOfFame( ) where {T<:DATA_TYPE,L<:LOSS_TYPE} base_tree = create_expression(zero(T), options, dataset) - return HallOfFame{T,L,typeof(base_tree), PopMember{T,L,typeof(base_tree)}}( + return HallOfFame{T,L,typeof(base_tree),PopMember{T,L,typeof(base_tree)}}( [ PopMember( copy(base_tree), @@ -131,21 +133,22 @@ end # ) # end -_fmt(x::Integer) = @sprintf("%-10d", x) +_fmt(x::Integer) = @sprintf("%-10d", x) _fmt(x::AbstractFloat) = @sprintf("%-8.3e", x) -_fmt(x) = rpad(string(x), 12) # fallback +_fmt(x) = rpad(string(x), 12) # fallback function string_dominating_pareto_curve( hallOfFame, dataset, options; width::Union{Integer,Nothing}=nothing, pretty::Bool=true ) terminal_width = (width === nothing) ? 
100 : max(100, width::Integer) formatted = format_hall_of_fame(hallOfFame, options) - stat_cols = collect(propertynames(formatted)) + stat_cols = collect(propertynames(formatted)) filter!(c -> c ≠ :trees, stat_cols) - priority = [:complexity, :loss, :score] - stat_cols = vcat(intersect(priority, stat_cols), - setdiff(stat_cols, priority)) - header_cells = [rpad(styled("{bold:{underline:$(titlecase(string(c)))}}"), 12) for c in stat_cols] + priority = [:complexity, :loss, :score] + stat_cols = vcat(intersect(priority, stat_cols), setdiff(stat_cols, priority)) + header_cells = [ + rpad(styled("{bold:{underline:$(titlecase(string(c)))}}"), 12) for c in stat_cols + ] push!(header_cells, styled("{bold:{underline:Equation}}")) header = join(header_cells, " ") @@ -154,18 +157,23 @@ function string_dominating_pareto_curve( println(buffer, '─'^(terminal_width - 1)) println(buffer, header) for i in 1:length(formatted.trees) - stats = join((_fmt(getfield(formatted, c)[i]) for c in stat_cols), " ") + stats = join((_fmt(getfield(formatted, c)[i]) for c in stat_cols), " ") print(buffer, stats) - eqn = string_tree(formatted.trees[i], options; - display_variable_names = dataset.display_variable_names, - X_sym_units = dataset.X_sym_units, - y_sym_units = dataset.y_sym_units, - pretty) + eqn = string_tree( + formatted.trees[i], + options; + display_variable_names=dataset.display_variable_names, + X_sym_units=dataset.X_sym_units, + y_sym_units=dataset.y_sym_units, + pretty, + ) prefix = make_prefix(formatted.trees[i], options, dataset) - print(buffer, - wrap_equation_string(prefix * eqn, - length(stats) + length(prefix) + 2, - terminal_width)) + print( + buffer, + wrap_equation_string( + prefix * eqn, length(stats) + length(prefix) + 2, terminal_width + ), + ) end print(buffer, '─'^(terminal_width - 1)) return dump_buffer(buffer) @@ -207,9 +215,11 @@ function wrap_equation_string(eqn_string, left_cols_width, terminal_width) return dump_buffer(buffer) end -@unstable function 
format_hall_of_fame(hof::HallOfFame{T,L,N,PM}, options; - columns::Union{Vector{Symbol},Nothing}=[:losses, :complexities, :scores, :trees] - ) where {T,L,N,PM<:PopMember{T,L,N}} +@unstable function format_hall_of_fame( + hof::HallOfFame{T,L,N,PM}, + options; + columns::Union{Vector{Symbol},Nothing}=[:losses, :complexities, :scores, :trees], +) where {T,L,N,PM<:PopMember{T,L,N}} dominating = calculate_pareto_frontier(hof) foreach(dominating) do member if member.loss < 0.0 diff --git a/src/Mutate.jl b/src/Mutate.jl index 829d2704d..639414206 100644 --- a/src/Mutate.jl +++ b/src/Mutate.jl @@ -60,7 +60,8 @@ This struct encapsulates the result of a mutation operation. Either a new expres Return the `member` if you want to return immediately, and have computed the loss value as part of the mutation. """ -struct MutationResult{N<:AbstractExpression,P<:AbstractPopMember} <: AbstractMutationResult{N,P} +struct MutationResult{N<:AbstractExpression,P<:AbstractPopMember} <: + AbstractMutationResult{N,P} tree::Union{N,Nothing} member::Union{P,Nothing} num_evals::Float64 @@ -348,7 +349,7 @@ end weights::W, options::AbstractOptions; kws..., -) where {W<:AbstractMutationWeights, PM<:AbstractPopMember} +) where {W<:AbstractMutationWeights,PM<:AbstractPopMember} mutation_choices = fieldnames(W) quote Base.Cartesian.@nif( diff --git a/src/PopMember.jl b/src/PopMember.jl index b28074638..f7e6763ef 100644 --- a/src/PopMember.jl +++ b/src/PopMember.jl @@ -10,7 +10,8 @@ using ..LossFunctionsModule: eval_cost abstract type AbstractPopMember{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T}} end # Define a member of population by equation, cost, and age -mutable struct PopMember{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T}} <: AbstractPopMember{T,L,N} +mutable struct PopMember{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T}} <: + AbstractPopMember{T,L,N} tree::N cost::L # Inludes complexity penalty, normalization loss::L # Raw loss @@ -21,7 +22,9 @@ mutable struct 
PopMember{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T}} <: ref::Int parent::Int end -@inline function Base.setproperty!(member::M, field::Symbol, value) where {M<:AbstractPopMember} +@inline function Base.setproperty!( + member::M, field::Symbol, value +) where {M<:AbstractPopMember} if field == :complexity throw( error("Don't set `.complexity` directly. Use `recompute_complexity!` instead.") @@ -36,7 +39,9 @@ end end return setfield!(member, field, value) end -@unstable @inline function Base.getproperty(member::M, field::Symbol) where {M<:AbstractPopMember} +@unstable @inline function Base.getproperty( + member::M, field::Symbol +) where {M<:AbstractPopMember} if field == :complexity throw( error("Don't access `.complexity` directly. Use `compute_complexity` instead.") @@ -180,5 +185,4 @@ function recompute_complexity!( return complexity end - end diff --git a/src/SymbolicRegression.jl b/src/SymbolicRegression.jl index fd51b430e..ec9a31742 100644 --- a/src/SymbolicRegression.jl +++ b/src/SymbolicRegression.jl @@ -791,7 +791,9 @@ function _warmup_search!( updated_pop = @sr_spawner( begin in_pop = first( - extract_from_worker(last_pop, Population{T,L,N}, HallOfFame{T,L,N,PM{T,L,N}}) + extract_from_worker( + last_pop, Population{T,L,N}, HallOfFame{T,L,N,PM{T,L,N}} + ), ) _dispatch_s_r_cycle( in_pop, From 6520bcaab3549f7a3ff2e541e210e798b96f5bf4 Mon Sep 17 00:00:00 2001 From: Atharva Sehgal Date: Fri, 10 Oct 2025 10:46:21 +0000 Subject: [PATCH 5/6] init --- .JuliaFormatter.toml | 1 + .github/codecov.yml | 8 + .github/workflows/CI.yml | 50 +- .github/workflows/Documentation.yml | 48 +- .github/workflows/benchmark_pr.yml | 69 +-- .github/workflows/check-format.yml | 4 +- .github/workflows/fix-format.yml | 4 +- .gitignore | 7 +- .pre-commit-config.yaml | 2 +- Project.toml | 17 +- README.md | 37 +- benchmark/benchmarks.jl | 6 +- docs/Project.toml | 5 +- docs/make.jl | 357 ++++++++--- docs/package.json | 16 + docs/src/.vitepress/config.mts | 95 +++ 
docs/src/.vitepress/theme/index.ts | 50 ++ docs/src/.vitepress/theme/style.css | 92 +++ docs/src/api.md | 6 + docs/src/assets/logo.png | Bin 0 -> 159937 bytes docs/src/assets/logo.svg | 1 - docs/src/components/AuthorBadge.vue | 139 +++++ docs/src/components/Authors.vue | 28 + docs/src/components/VersionPicker.vue | 130 ++++ docs/src/examples.md | 78 ++- docs/src/slurm.md | 175 ++++++ docs/src/types.md | 4 +- docs/utils.jl | 8 +- examples/custom_types.jl | 264 ++++++++ examples/mooncake/Project.toml | 9 + examples/mooncake/example.jl | 17 + examples/parameterized_function.jl | 32 +- examples/template_expression_complex.jl | 4 +- ext/SymbolicRegressionEnzymeExt.jl | 36 +- ext/SymbolicRegressionMooncakeExt.jl | 44 ++ src/CheckConstraints.jl | 87 ++- src/Complexity.jl | 4 +- src/ComposableExpression.jl | 130 +++- src/Configure.jl | 57 +- src/ConstantOptimization.jl | 87 ++- src/Core.jl | 6 +- src/DimensionalAnalysis.jl | 116 ++-- src/ExpressionBuilder.jl | 10 +- src/HallOfFame.jl | 168 +++-- src/InterfaceDataTypes.jl | 34 + src/InterfaceDynamicExpressions.jl | 49 +- src/Logging.jl | 23 +- src/LossFunctions.jl | 5 +- src/MLJInterface.jl | 58 +- src/Mutate.jl | 38 +- src/MutationFunctions.jl | 586 +++++++++--------- src/MutationWeights.jl | 2 + src/Operators.jl | 3 +- src/Options.jl | 576 ++++++++++------- src/OptionsStruct.jl | 127 ++-- src/ParametricExpression.jl | 29 +- src/ProgramConstants.jl | 2 +- src/SearchUtils.jl | 174 +++++- src/SymbolicRegression.jl | 126 +++- src/TemplateExpression.jl | 218 ++++++- src/TemplateExpressionMacro.jl | 56 +- src/Utils.jl | 30 +- test/Project.toml | 5 +- test/autodiff_helpers.jl | 93 +++ test/runtests.jl | 45 +- test/test_abstract_numbers.jl | 87 ++- test/test_composable_expression.jl | 86 +++ test/test_constraints.jl | 18 + test/test_custom_objectives.jl | 32 +- test/test_custom_operators_multiprocessing.jl | 100 +-- test/test_derivatives.jl | 4 +- test/test_early_stop.jl | 78 ++- test/test_expression_builder.jl | 53 ++ 
test/test_expression_derivatives.jl | 127 ++-- test/test_feature_mutation.jl | 79 +++ test/test_filtered_async.jl | 56 ++ test/test_guesses.jl | 538 ++++++++++++++++ ...oss_function_expression_multiprocessing.jl | 14 +- test/test_loss_scale.jl | 172 +++++ test/test_mixed_utils.jl | 2 +- test/test_mlj.jl | 35 +- test/test_mooncake_autodiff.jl | 141 +++++ test/test_operators.jl | 48 +- test/test_options.jl | 124 +++- test/test_parametric_template_expressions.jl | 11 +- test/test_pretty_printing.jl | 4 +- test/test_rotation.jl | 58 +- test/test_template_expression.jl | 105 +++- test/test_template_macro.jl | 12 + 89 files changed, 5385 insertions(+), 1386 deletions(-) create mode 100644 .github/codecov.yml create mode 100644 docs/package.json create mode 100644 docs/src/.vitepress/config.mts create mode 100644 docs/src/.vitepress/theme/index.ts create mode 100644 docs/src/.vitepress/theme/style.css create mode 100644 docs/src/assets/logo.png delete mode 100644 docs/src/assets/logo.svg create mode 100644 docs/src/components/AuthorBadge.vue create mode 100644 docs/src/components/Authors.vue create mode 100644 docs/src/components/VersionPicker.vue create mode 100644 docs/src/slurm.md create mode 100644 examples/custom_types.jl create mode 100644 examples/mooncake/Project.toml create mode 100644 examples/mooncake/example.jl create mode 100644 ext/SymbolicRegressionMooncakeExt.jl create mode 100644 src/InterfaceDataTypes.jl create mode 100644 test/autodiff_helpers.jl create mode 100644 test/test_feature_mutation.jl create mode 100644 test/test_filtered_async.jl create mode 100644 test/test_guesses.jl create mode 100644 test/test_loss_scale.jl create mode 100644 test/test_mooncake_autodiff.jl diff --git a/.JuliaFormatter.toml b/.JuliaFormatter.toml index e8b8efea5..352839595 100644 --- a/.JuliaFormatter.toml +++ b/.JuliaFormatter.toml @@ -1,3 +1,4 @@ style = "blue" ignore = ["docs"] +# Keep pipe operators as they are more readable pipe_to_function_call = false diff --git 
a/.github/codecov.yml b/.github/codecov.yml new file mode 100644 index 000000000..bfdc9877d --- /dev/null +++ b/.github/codecov.yml @@ -0,0 +1,8 @@ +coverage: + status: + project: + default: + informational: true + patch: + default: + informational: true diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index cf1ace9ba..36a105b01 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -2,8 +2,6 @@ name: CI on: push: - branches: - - master paths: - "**" pull_request: @@ -23,6 +21,7 @@ jobs: name: Julia ${{ matrix.julia-version }}-${{ matrix.os }}-${{ matrix.test }}-${{ github.event_name }} runs-on: ${{ matrix.os }} timeout-minutes: 240 + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository strategy: fail-fast: false matrix: @@ -54,9 +53,15 @@ jobs: - os: macOS-latest julia-version: "1" test: "part3" + - os: ubuntu-latest + julia-version: "1" + test: "enzyme" + - os: ubuntu-latest + julia-version: "1" + test: "jet" steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: "Set up Julia" uses: julia-actions/setup-julia@v2 with: @@ -67,6 +72,12 @@ jobs: cache-name: julia-cache;workflow=${{ github.workflow }};job=${{ github.job }};os=${{ matrix.os }};julia=${{ matrix.julia-version }};project=${{ hashFiles('**/Project.toml') }} - name: "Build package" uses: julia-actions/julia-buildpkg@v1 + - name: "Install Enzyme (if needed)" + if: matrix.test == 'enzyme' + run: julia --color=yes --project=test -e 'import Pkg; Pkg.add("Enzyme")' + - name: "Install JET (if needed)" + if: matrix.test == 'jet' + run: julia --color=yes --project=test -e 'import Pkg; Pkg.add("JET")' - name: "Run tests" env: SYMBOLIC_REGRESSION_TEST_SUITE: ${{ matrix.test }} @@ -75,19 +86,30 @@ jobs: julia --color=yes --threads=auto --check-bounds=yes --depwarn=yes --code-coverage=user -e 'import Coverage; import Pkg; Pkg.activate("."); Pkg.test(coverage=true)' julia --color=yes coverage.jl shell: bash - - 
name: "Coveralls" - uses: coverallsapp/github-action@v2 + - name: "Upload coverage artifacts" + uses: actions/upload-artifact@v4 with: - path-to-lcov: lcov.info - parallel: true - flag-name: julia-${{ matrix.julia-version }}-${{ matrix.os }}-${{ matrix.test }}-${{ github.event_name }} + name: coverage-${{ matrix.julia-version }}-${{ matrix.os }}-${{ matrix.test }} + path: lcov.info + retention-days: 1 - coveralls: - name: Indicate completion to coveralls - runs-on: ubuntu-latest + upload-coverage: + name: Upload Coverage to Codecov needs: test + runs-on: ubuntu-latest + # Only run on pushes to master or pull requests + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository steps: - - name: Finish - uses: coverallsapp/github-action@v2 + - uses: actions/checkout@v5 + - name: Download all coverage artifacts + uses: actions/download-artifact@v5 + with: + pattern: coverage-* + path: coverage + - name: Upload to Codecov + uses: codecov/codecov-action@v5 with: - parallel-finished: true + token: ${{ secrets.CODECOV_TOKEN }} + directory: ./coverage + fail_ci_if_error: true + verbose: true diff --git a/.github/workflows/Documentation.yml b/.github/workflows/Documentation.yml index 57cd452ef..e03b7643f 100644 --- a/.github/workflows/Documentation.yml +++ b/.github/workflows/Documentation.yml @@ -9,27 +9,59 @@ on: - "docs/**" - "README.md" - ".github/workflows/**" + - "examples/**" tags: "*" + pull_request: + branches: + - master + paths: + - "src/**" + - "docs/**" + - "README.md" + - ".github/workflows/**" + - "examples/**" jobs: build: runs-on: ubuntu-latest + permissions: + contents: write + pull-requests: read + statuses: write + strategy: + matrix: + target: [astroautomata, cambridge] steps: - - uses: actions/checkout@v4 - - uses: julia-actions/setup-julia@latest + - uses: actions/checkout@v5 + - uses: julia-actions/setup-julia@v2 with: version: "1" - - name: "Cache dependencies" + - uses: actions/setup-node@v5 + 
with: + node-version: "20" + - name: Load Julia packages from cache + id: julia-cache uses: julia-actions/cache@v2 - - name: Force use of updated Julia registry - run: | - rm -rf ~/.julia/registries/General - julia -e 'ENV["JULIA_PKG_SERVER"]=""; using Pkg; Pkg.update()' - - name: "Install dependencies" + with: + cache-name: julia-cache;workflow=${{ github.workflow }};job=${{ github.job }};os=${{ runner.os }};julia=1;project=${{ hashFiles('**/Project.toml') }} + - name: "Install Julia dependencies" run: julia --project=docs/ -e 'using Pkg; Pkg.develop(PackageSpec(path=pwd())); Pkg.instantiate()' + - name: "Install Node.js dependencies" + run: cd docs && npm install - name: "Build and deploy" env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} DOCUMENTER_KEY_ASTROAUTOMATA: ${{ secrets.DOCUMENTER_KEY }} DOCUMENTER_KEY_CAM: ${{ secrets.DAMTP_DEPLOY_KEY }} + DOCUMENTER_PRODUCTION: "true" + DEPLOYMENT_TARGET: ${{ matrix.target }} run: | julia --project=docs/ docs/make.jl + - name: Save Julia depot cache on cancel or failure + id: julia-cache-save + if: cancelled() || failure() + uses: actions/cache/save@v4 + with: + path: | + ${{ steps.julia-cache.outputs.cache-paths }} + key: ${{ steps.julia-cache.outputs.cache-key }} diff --git a/.github/workflows/benchmark_pr.yml b/.github/workflows/benchmark_pr.yml index 69380c4cc..7d32bd419 100644 --- a/.github/workflows/benchmark_pr.yml +++ b/.github/workflows/benchmark_pr.yml @@ -12,72 +12,11 @@ concurrency: permissions: pull-requests: write - jobs: - generate_plots: + bench: runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: julia-actions/setup-julia@v2 - with: - version: "1" - - uses: julia-actions/cache@v2 - - name: Extract Package Name from Project.toml - id: extract-package-name - run: | - PACKAGE_NAME=$(grep "^name" Project.toml | sed 's/^name = "\(.*\)"$/\1/') - echo "::set-output name=package_name::$PACKAGE_NAME" - - name: Build AirspeedVelocity - env: - JULIA_NUM_THREADS: 2 - run: | - # Lightweight 
build step, as sometimes the runner runs out of memory: - julia -e 'ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0; import Pkg; Pkg.add(;url="https://github.com/MilesCranmer/AirspeedVelocity.jl.git")' - julia -e 'ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0; import Pkg; Pkg.build("AirspeedVelocity")' - - name: Add ~/.julia/bin to PATH - run: | - echo "$HOME/.julia/bin" >> $GITHUB_PATH - - name: Run benchmarks - run: | - echo $PATH - ls -l ~/.julia/bin - mkdir results - benchpkg ${{ steps.extract-package-name.outputs.package_name }} --rev="${{github.event.repository.default_branch}},${{github.event.pull_request.head.sha}}" --url=${{ github.event.repository.clone_url }} --bench-on="${{github.event.pull_request.head.sha}}" --output-dir=results/ --exeflags="-O3 --threads=auto" - - name: Create plots from benchmarks - run: | - mkdir -p plots - benchpkgplot ${{ steps.extract-package-name.outputs.package_name }} --rev="${{github.event.repository.default_branch}},${{github.event.pull_request.head.sha}}" --npart=10 --format=png --input-dir=results/ --output-dir=plots/ - - name: Upload plot as artifact - uses: actions/upload-artifact@v4 - with: - name: plots - path: plots - - name: Create markdown table from benchmarks - run: | - benchpkgtable ${{ steps.extract-package-name.outputs.package_name }} --rev="${{github.event.repository.default_branch}},${{github.event.pull_request.head.sha}}" --input-dir=results/ --ratio > table.md - echo '### Benchmark Results' > body.md - echo '' >> body.md - echo '' >> body.md - cat table.md >> body.md - echo '' >> body.md - echo '' >> body.md - echo '### Benchmark Plots' >> body.md - echo 'A plot of the benchmark results have been uploaded as an artifact to the workflow run for this PR.' >> body.md - echo 'Go to "Actions"->"Benchmark a pull request"->[the most recent run]->"Artifacts" (at the bottom).' 
>> body.md - - - name: Find Comment - uses: peter-evans/find-comment@v3 - id: fcbenchmark - with: - issue-number: ${{ github.event.pull_request.number }} - comment-author: "github-actions[bot]" - body-includes: Benchmark Results - - - name: Comment on PR - uses: peter-evans/create-or-update-comment@v4 + - uses: MilesCranmer/AirspeedVelocity.jl@action-v1 with: - comment-id: ${{ steps.fcbenchmark.outputs.comment-id }} - issue-number: ${{ github.event.pull_request.number }} - body-path: body.md - edit-mode: replace + julia-version: "1" + exeflags: "-O3 --threads=auto" diff --git a/.github/workflows/check-format.yml b/.github/workflows/check-format.yml index 6bc6bdd51..b6c239135 100644 --- a/.github/workflows/check-format.yml +++ b/.github/workflows/check-format.yml @@ -17,7 +17,7 @@ jobs: julia-version: [1] os: [ubuntu-latest] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: julia-actions/setup-julia@v2 with: version: ${{ matrix.julia-version }} @@ -25,7 +25,7 @@ jobs: uses: julia-actions/cache@v2 - name: Install JuliaFormatter and format run: | - julia --startup-file=no -e 'using Pkg; pkg"activate --temp"; pkg"add JuliaFormatter@1.0.61"; using JuliaFormatter; format("."; verbose=true)' + julia --startup-file=no -e 'using Pkg; pkg"activate --temp"; pkg"add JuliaFormatter@2"; using JuliaFormatter; format("."; verbose=true)' - name: "Format check" run: | julia -e ' diff --git a/.github/workflows/fix-format.yml b/.github/workflows/fix-format.yml index 9c6a1a3f0..992ef90cb 100644 --- a/.github/workflows/fix-format.yml +++ b/.github/workflows/fix-format.yml @@ -12,7 +12,7 @@ jobs: julia-version: [1] os: [ubuntu-latest] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: julia-actions/setup-julia@v2 with: version: ${{ matrix.julia-version }} @@ -20,7 +20,7 @@ jobs: uses: julia-actions/cache@v2 - name: "Install JuliaFormatter and format" run: | - julia -e 'import Pkg; Pkg.add("JuliaFormatter")' + julia -e 'using Pkg; pkg"add 
JuliaFormatter@1.0.61"' julia -e 'using JuliaFormatter; format(".")' - name: "Create Pull Request" id: cpr diff --git a/.gitignore b/.gitignore index ec9d9b2f7..87d0075ab 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,11 @@ pysr_recorder.json docs/src/index.md *.code-workspace .vscode -**/*.json +docs/node_modules/ +docs/package-lock.json + LocalPreferences.toml private + +# Generated examples: +docs/src/examples/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8c833513a..7f8b5954c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,7 +11,7 @@ repos: - id: check-yaml - id: check-added-large-files - repo: https://github.com/domluna/JuliaFormatter.jl - rev: v1.0.61 + rev: v2.1.6 hooks: - id: julia-formatter - repo: https://github.com/pre-commit/mirrors-prettier diff --git a/Project.toml b/Project.toml index 85cc65c5c..3d6620ace 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "SymbolicRegression" uuid = "8254be44-1295-4e6a-a16d-46603ac705cb" authors = ["MilesCranmer "] -version = "1.9.2" +version = "2.0.0-alpha.9" [deps] ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b" @@ -34,11 +34,13 @@ TOML = "fa267f1f-6049-4f14-aa54-33bafae1ed76" [weakdeps] Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" JSON3 = "0f8b85d8-7281-11e9-16c2-39a750bddbf1" +Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6" SymbolicUtils = "d1185830-fcd6-423d-90d6-eec64667417b" [extensions] SymbolicRegressionEnzymeExt = "Enzyme" SymbolicRegressionJSON3Ext = "JSON3" +SymbolicRegressionMooncakeExt = "Mooncake" SymbolicRegressionSymbolicUtilsExt = "SymbolicUtils" [compat] @@ -46,24 +48,25 @@ ADTypes = "^1.4.0" Compat = "^4.16" ConstructionBase = "1.0.0 - 1.5.6, 1.5.8 - 1" Dates = "1" -DifferentiationInterface = "0.6.39" +DifferentiationInterface = "0.6.39, 0.7" DispatchDoctor = "^0.4.17" Distributed = "<0.0.1, 1" -DynamicDiff = "0.2" -DynamicExpressions = "~1.10.0" +DynamicDiff = "0.3" +DynamicExpressions = "2.4 - 2.4" DynamicQuantities = 
"1" Enzyme = "0.12, 0.13" JSON3 = "1" LineSearches = "7" Logging = "1" LossFunctions = "0.10, 0.11, 1" -MLJModelInterface = "1.5 - 1.11" +MLJModelInterface = "1.5 - 1.12" MacroTools = "0.4, 0.5" -Optim = "1.8 - 1.12" +Mooncake = "0.4.137" +Optim = "1.8 - 1.13" Pkg = "<0.0.1, 1" PrecompileTools = "1" Printf = "<0.0.1, 1" -ProgressMeter = "1.10.0 - 1.10.2" +ProgressMeter = "1.10.0 - 1.10.2, =1.10.4, =1.11.0" Random = "<0.0.1, 1" Reexport = "1" SpecialFunctions = "0.10.1, 1, 2" diff --git a/README.md b/README.md index 593a7286d..8548b3aa5 100644 --- a/README.md +++ b/README.md @@ -5,13 +5,36 @@ SymbolicRegression.jl searches for symbolic expressions which optimize a particu https://github.com/MilesCranmer/SymbolicRegression.jl/assets/7593028/f5b68f1f-9830-497f-a197-6ae332c94ee0 -| Latest release | Documentation | Forums | Paper | -| :---: | :---: | :---: | :---: | -| [![version](https://juliahub.com/docs/SymbolicRegression/version.svg)](https://juliahub.com/ui/Packages/SymbolicRegression/X2eIS) | [![Dev](https://img.shields.io/badge/docs-dev-blue.svg)](https://ai.damtp.cam.ac.uk/symbolicregression/dev/) | [![Discussions](https://img.shields.io/badge/discussions-github-informational)](https://github.com/MilesCranmer/PySR/discussions) | [![Paper](https://img.shields.io/badge/arXiv-2305.01582-b31b1b)](https://arxiv.org/abs/2305.01582) | - -| Build status | Coverage | -| :---: | :---: | -| [![CI](https://github.com/MilesCranmer/SymbolicRegression.jl/workflows/CI/badge.svg)](.github/workflows/CI.yml) | [![Coverage Status](https://coveralls.io/repos/github/MilesCranmer/SymbolicRegression.jl/badge.svg?branch=master)](https://coveralls.io/github/MilesCranmer/SymbolicRegression.jl?branch=master) | + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Latest releaseDocumentationForumsPaper
versionDevDiscussionsPaper
Build statusCoverage
CICoverage Status
Check out [PySR](https://github.com/MilesCranmer/PySR) for a Python frontend. diff --git a/benchmark/benchmarks.jl b/benchmark/benchmarks.jl index 9e4862348..3412242b5 100644 --- a/benchmark/benchmarks.jl +++ b/benchmark/benchmarks.jl @@ -90,8 +90,8 @@ function create_utils_benchmark() setup = ( nfeatures = 1; dataset = Dataset(randn(nfeatures, 32), randn(32)); - pop = Population(dataset; npop=100, nlength=20, options=$options, nfeatures); - rss = RunningSearchStatistics(; options=$options) + pop = Population(dataset; npop=100, nlength=20, options=($options), nfeatures); + rss = RunningSearchStatistics(; options=($options)) ) ) @@ -156,7 +156,7 @@ function create_utils_benchmark() nfeatures = 1; T = Float64; dataset = Dataset(randn(nfeatures, 512), randn(512)); - ntrees = $ntrees; + ntrees = ($ntrees); trees = [ gen_random_tree_fixed_size(20, $options, nfeatures, T) for i in 1:ntrees ]; diff --git a/docs/Project.toml b/docs/Project.toml index f66ed72c1..d799e1616 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -1,11 +1,14 @@ [deps] Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4" +DocumenterVitepress = "4710194d-e776-4893-9690-8d956a29c365" DynamicExpressions = "a40a106e-89c9-4ca8-8020-a735e8728b6b" Gumbo = "708ec375-b3d6-5a57-a7ce-8257bf98657a" Literate = "98b081ad-f1c9-55d3-8b20-4c87d4299306" MLJBase = "a7f614a8-145f-11e9-1d2a-a57a1082229d" +SymbolicRegression = "8254be44-1295-4e6a-a16d-46603ac705cb" SymbolicUtils = "d1185830-fcd6-423d-90d6-eec64667417b" Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" [compat] -Documenter = "0.27" +Documenter = "1" +DocumenterVitepress = "=0.2.6" diff --git a/docs/make.jl b/docs/make.jl index 087217bd3..ba0dfdbdc 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -1,4 +1,5 @@ using Documenter +using DocumenterVitepress using SymbolicUtils using SymbolicRegression using SymbolicRegression: @@ -21,93 +22,259 @@ include("utils.jl") process_literate_blocks("test") process_literate_blocks("examples") -readme = 
open(dirname(@__FILE__) * "/../README.md") do io - read(io, String) +# Define the proper YAML frontmatter for VitePress - must be wrapped in @raw html for DocumenterVitepress +proper_yaml = """```@raw html +--- +layout: home + +hero: + name: SymbolicRegression.jl + text: Discover Mathematical Laws from Data + tagline: A flexible, user-friendly framework that automatically finds interpretable equations from your data + actions: + - theme: brand + text: Get Started + link: /examples + - theme: alt + text: API Reference 📚 + link: /api + - theme: alt + text: View on GitHub + link: https://github.com/MilesCranmer/SymbolicRegression.jl + image: + src: /logo.png + alt: SymbolicRegression.jl + +features: + - icon: 🔬 + title: Interpretable By Design + details: Discovers interpretable mathematical expressions instead of black-box models. + + - icon: 🚀 + title: Production Ready + details: Years of development have produced mature, highly optimized parallel evolutionary algorithms. + + - icon: 🔧 + title: Extremely Customizable + details: "Customize everything: operators, loss functions, complexity, input types, optimizer, and more." + + - icon: 🔌 + title: Julia Native + details: Built for automatic interoperability with the entire scientific computing stack. +--- +``` + +""" + +# Post-process VitePress output to fix YAML frontmatter and HTML escaping +function post_process_vitepress_index() + # Fix BOTH index.md files - in production mode, files are in build/1/ subdirectory + is_production = get(ENV, "DOCUMENTER_PRODUCTION", "false") == "true" + build_subdir = is_production ? "1" : "." 
+ + for index_path in [ + joinpath(@__DIR__, "build", ".documenter", "index.md"), + joinpath(@__DIR__, "build", build_subdir, "index.md"), + ] + process_single_index_file(index_path) + end end -# First, we remove all markdown comments: -readme = replace(readme, r"" => s"") +function process_single_index_file(index_path) + if !isfile(index_path) + @warn "Index file not found: $index_path - skipping" + return false + end -# Then, we remove any line with " s"") + content = read(index_path, String) -# We delete the https://github.com/MilesCranmer/SymbolicRegression.jl/assets/7593028/f5b68f1f-9830-497f-a197-6ae332c94ee0, -# and replace it with a video: -readme = replace( - readme, - r"https://github.com/MilesCranmer/SymbolicRegression.jl/assets/7593028/f5b68f1f-9830-497f-a197-6ae332c94ee0" => - ( - """ - ```@raw html -
- -
- ``` - """ - ), -) + # Check if YAML frontmatter has been corrupted by DocumenterVitepress.jl + has_hero_pattern = occursin(r"hero:\s*name:", content) + if has_hero_pattern + # Replace the corrupted frontmatter with proper VitePress home layout + # Replace everything from the start up to the first "## Example" with our proper YAML + content = replace(content, r"^.*?(?=## Example)"s => proper_yaml) + end + + # Fix HTML escaping - unescape HTML entities + content = replace(content, "<" => "<") + content = replace(content, ">" => ">") + content = replace(content, """ => "\"") + content = replace(content, "'" => "'") + content = replace(content, "&" => "&") + + write(index_path, content) + return true +end + +readme = open(dirname(@__FILE__) * "/../README.md") do io + read(io, String) +end -# We prepend the `` with a ```@raw html -# and append the `
` with a ```: -readme = replace(readme, r"" => s"```@raw html\n
") -readme = replace(readme, r"
" => s"\n```") +# VitePress frontmatter for beautiful home page +vitepress_frontmatter = proper_yaml -# Then, we surround ```mermaid\n...\n``` snippets -# with ```@raw html\n
\n...\n
```: -readme = replace( +# Process README for VitePress +readme = replace(readme, r"" => s"") # Remove markdown comments +readme = replace(readme, r"<[/]?div.*" => s"") # Remove div tags +readme = replace(readme, r"\*\*Contents\*\*:.*?(?=## )"s => s"") # Remove Contents TOC +readme = replace(readme, r"## Contributors ✨.*$"s => s"") # Remove Contributors section onwards +readme = replace( # Convert video URL to proper video tag wrapped in @raw html for VitePress readme, - r"```mermaid([^`]*)```" => s"```@raw html\n
\n\1\n
\n```", + r"https://github.com/MilesCranmer/SymbolicRegression.jl/assets/7593028/f5b68f1f-9830-497f-a197-6ae332c94ee0" => """```@raw html +
+ +
+```""", ) -# Then, we init mermaid.js: -init_mermaid = """ -```@raw html - -``` -""" +# Wrap HTML tables in @raw html blocks for VitePress +readme = replace(readme, r"(.*?
)"s => s"```@raw html\n\1\n```") -readme = init_mermaid * readme +# Add VitePress frontmatter +readme = vitepress_frontmatter * readme -# Finally, we read in file docs/src/index_base.md: +# Read base content index_base = open(dirname(@__FILE__) * "/src/index_base.md") do io read(io, String) end -# And then we create "/src/index.md": -open(dirname(@__FILE__) * "/src/index.md", "w") do io +# Create index.md with VitePress frontmatter and content +index_md_path = dirname(@__FILE__) * "/src/index.md" +open(index_md_path, "w") do io write(io, readme) write(io, "\n") write(io, index_base) end +# Pre-process the source index.md to ensure clean YAML frontmatter +# This ensures VitePress processes clean YAML during makedocs() +function preprocess_source_index() + index_path = joinpath(@__DIR__, "src", "index.md") + if !isfile(index_path) + @warn "Source index file not found: $index_path - skipping" + return false + end + + content = read(index_path, String) + + # Check if YAML frontmatter has any issues that need fixing + has_hero_pattern = occursin(r"hero:\s*name:", content) + if has_hero_pattern + # Ensure YAML frontmatter is clean and properly formatted + # Replace everything from the start up to the first "## Example" with our proper YAML + content = replace(content, r"^.*?(?=## Example)"s => proper_yaml) + end + + # Fix any HTML escaping in the source + content = replace(content, "<" => "<") + content = replace(content, ">" => ">") + content = replace(content, """ => "\"") + content = replace(content, "'" => "'") + content = replace(content, "&" => "&") + + write(index_path, content) + return true +end + +# Fix VitePress base path for dual deployment +function fix_vitepress_base_path() + deployment_target = get(ENV, "DEPLOYMENT_TARGET", "astroautomata") + + # Determine the correct base path for each deployment + base_path = if deployment_target == "cambridge" + "/symbolicregression/dev/" + else + "/SymbolicRegression.jl/dev/" + end + + # Find and fix VitePress SOURCE 
config file (before build) + config_paths = [joinpath(@__DIR__, "src", ".vitepress", "config.mts")] + + for config_path in config_paths + if isfile(config_path) + @info "Fixing VitePress base path in $config_path for deployment target: $deployment_target" + content = read(config_path, String) + + # Replace the base path with the correct one for this deployment + # Look for existing base: '...' patterns and replace them + content = replace(content, r"base:\s*'[^']*'" => "base: '$base_path'") + + write(config_path, content) + @info "Updated VitePress base path to: $base_path" + else + @warn "VitePress config not found at: $config_path" + end + end +end + +# Run preprocessing on source files before makedocs() +preprocess_source_index() + +# Fix VitePress base path BEFORE makedocs() - this is crucial for timing! +fix_vitepress_base_path() + +# Configure deployment based on target +deployment_target = get(ENV, "DEPLOYMENT_TARGET", "astroautomata") + +deploy_config = Documenter.auto_detect_deploy_system() +if deployment_target == "cambridge" + # Cambridge deployment with different base path + deploy_decision = Documenter.deploy_folder( + deploy_config; + repo="github.com/ai-damtp-cam-ac-uk/symbolicregression", + devbranch="master", + devurl="dev", + push_preview=true, + ) +else + # Default astroautomata deployment + deploy_decision = Documenter.deploy_folder( + deploy_config; + repo="github.com/MilesCranmer/SymbolicRegression.jl", + devbranch="master", + devurl="dev", + push_preview=true, + ) +end + DocMeta.setdocmeta!( SymbolicRegression, :DocTestSetup, :(using LossFunctions, DynamicExpressions); recursive=true, ) + makedocs(; sitename="SymbolicRegression.jl", authors="Miles Cranmer", doctest=true, - strict=:doctest, - clean=true, - format=Documenter.HTML(; - canonical="https://ai.damtp.cam.ac.uk/symbolicregression/stable" + clean=get(ENV, "DOCUMENTER_PRODUCTION", "false") == "true", + warnonly=[:docs_block, :cross_references, :missing_docs], + 
format=DocumenterVitepress.MarkdownVitepress(; + repo="github.com/MilesCranmer/SymbolicRegression.jl", + devbranch="master", + devurl="dev", + deploy_url=nothing, + deploy_decision, + build_vitepress=get(ENV, "DOCUMENTER_PRODUCTION", "false") == "true", + md_output_path=if get(ENV, "DOCUMENTER_PRODUCTION", "false") == "true" + ".documenter" + else + "." + end, ), pages=[ - "Contents" => "index_base.md", "Home" => "index.md", "Examples" => [ "Short Examples" => "examples.md", "Template Expressions" => "examples/template_expression.md", "Parameterized Expressions" => "examples/parameterized_function.md", "Parameterized Template Expressions" => "examples/template_parametric_expression.md", + "Custom Types" => "examples/custom_types.md", + "Using SymbolicRegression.jl on a Cluster" => "slurm.md", ], "API" => "api.md", "Losses" => "losses.md", @@ -116,41 +283,81 @@ makedocs(; ], ) -# Next, we fix links in the docs/build/losses/index.html file: -using Gumbo - -html_type(::HTMLElement{S}) where {S} = S +# Post-processing after makedocs() (for any remaining issues in build output) +# This runs after VitePress build to fix any final rendering issues +post_process_vitepress_index() -function apply_to_a_href!(f!, element::HTMLElement) - if html_type(element) == :a && - haskey(element.attributes, "href") && - element.attributes["href"] == "@ref" - f!(element) - else - for child in element.children - typeof(child) <: HTMLElement && apply_to_a_href!(f!, child) - end +# Fix bases.txt if it's empty (prevents "no bases suitable for deployment" error) +function fix_empty_bases() + bases_file = joinpath(@__DIR__, "build", "bases.txt") + mkpath(dirname(bases_file)) + if !isfile(bases_file) || isempty(filter(!isempty, readlines(bases_file))) + @info "Creating/fixing bases.txt for deployment" + write(bases_file, "dev\n") end end -html_content = read("docs/build/losses/index.html", String) -html = parsehtml(html_content) +fix_empty_bases() -apply_to_a_href!(html.root) do element - # 
Replace the "href" to be equal to the contents of the tag, prefixed with #: - element.attributes["href"] = "#LossFunctions." * element.children[1].text -end +# Fix VitePress base path BEFORE building (moved to before makedocs) -# Then, we write the new html to the file, only if it has changed: -open("docs/build/losses/index.html", "w") do io - write(io, string(html)) +# Additional post-processing for VitePress production build issues +function fix_vitepress_production_output() + build_index_html = joinpath(@__DIR__, "build", "1", "index.html") + if !isfile(build_index_html) + @warn "Production index.html not found: $build_index_html" + return false + end + + content = read(build_index_html, String) + + # Check if the page is showing literal YAML instead of home layout + if occursin(r"

layout: home

", content) + @info "Detected literal YAML frontmatter in production HTML - fixing..." + + # This is a more complex fix - we need to regenerate the page with proper VitePress home layout + # For now, let's try to replace the literal YAML content with a message + content = replace( + content, + r"

layout: home

.*?
" => """
+
+

SymbolicRegression.jl

+

Note: VitePress home layout not working in production mode. Please use the dev server or check the documentation.

+
+
"""; + count=1, + ) + + write(build_index_html, content) + @info "Applied temporary fix to production HTML output" + return true + end + + return false end -if !haskey(ENV, "JL_LIVERELOAD") - ENV["DOCUMENTER_KEY"] = get(ENV, "DOCUMENTER_KEY_ASTROAUTOMATA", "") - deploydocs(; repo="github.com/MilesCranmer/SymbolicRegression.jl.git") +# Apply additional fix for production build +fix_vitepress_production_output() + +# Deploy based on environment variable - supports CI matrix strategy +deployment_target = get(ENV, "DEPLOYMENT_TARGET", "astroautomata") +if deployment_target == "astroautomata" + DocumenterVitepress.deploydocs(; + repo="github.com/MilesCranmer/SymbolicRegression.jl.git", + push_preview=true, + target="build", + devbranch="master", + ) +elseif deployment_target == "cambridge" ENV["DOCUMENTER_KEY"] = get(ENV, "DOCUMENTER_KEY_CAM", "") ENV["GITHUB_REPOSITORY"] = "ai-damtp-cam-ac-uk/symbolicregression.git" - deploydocs(; repo="github.com/ai-damtp-cam-ac-uk/symbolicregression.git") + DocumenterVitepress.deploydocs(; + repo="github.com/ai-damtp-cam-ac-uk/symbolicregression.git", + push_preview=true, + target="build", + devbranch="master", + ) +else + @warn "Unknown DEPLOYMENT_TARGET: $deployment_target. Skipping deployment." 
end diff --git a/docs/package.json b/docs/package.json new file mode 100644 index 000000000..a0fdc39da --- /dev/null +++ b/docs/package.json @@ -0,0 +1,16 @@ +{ + "scripts": { + "docs:dev": "vitepress dev build", + "docs:build": "vitepress build build", + "docs:preview": "vitepress preview build" + }, + "dependencies": { + "@nolebase/vitepress-plugin-enhanced-readabilities": "^2.14.0", + "d3-format": "^3.1.0", + "markdown-it": "^14.1.0", + "markdown-it-footnote": "^4.0.0", + "markdown-it-mathjax3": "^4.3.2", + "vitepress": "^1.6.3", + "vitepress-plugin-tabs": "^0.6.0" + } +} diff --git a/docs/src/.vitepress/config.mts b/docs/src/.vitepress/config.mts new file mode 100644 index 000000000..a38d47286 --- /dev/null +++ b/docs/src/.vitepress/config.mts @@ -0,0 +1,95 @@ +import { defineConfig } from 'vitepress' +import { tabsMarkdownPlugin } from 'vitepress-plugin-tabs' +import mathjax3 from "markdown-it-mathjax3"; +import footnote from "markdown-it-footnote"; + +function getBaseRepository(base: string): string { + if (!base || base === '/') return '/'; + const parts = base.split('/').filter(Boolean); + return parts.length > 0 ? `/${parts[0]}/` : '/'; +} + +const baseTemp = { + base: '/symbolicregression/dev/',// TODO: replace this in makedocs! +} + +const navTemp = { + nav: 'REPLACE_ME_DOCUMENTER_VITEPRESS', +} + +const nav = [ + ...navTemp.nav, + { + component: 'VersionPicker' + } +] + +// https://vitepress.dev/reference/site-config +export default defineConfig({ + base: '/symbolicregression/dev/',// TODO: replace this in makedocs! + title: 'REPLACE_ME_DOCUMENTER_VITEPRESS', + description: 'REPLACE_ME_DOCUMENTER_VITEPRESS', + lastUpdated: true, + cleanUrls: true, + outDir: 'REPLACE_ME_DOCUMENTER_VITEPRESS', // This is required for MarkdownVitepress to work correctly... 
+ head: [ + ['link', { rel: 'icon', href: `${baseTemp.base}favicon.ico` }], + ['script', {src: `${getBaseRepository(baseTemp.base)}versions.js`}], + // ['script', {src: '/versions.js'], for custom domains, I guess if deploy_url is available. + ['script', {src: `${baseTemp.base}siteinfo.js`}] + ], + ignoreDeadLinks: true, + vite: { + define: { + __DEPLOY_ABSPATH__: JSON.stringify('REPLACE_ME_DOCUMENTER_VITEPRESS_DEPLOY_ABSPATH'), + }, + optimizeDeps: { + exclude: [ + '@nolebase/vitepress-plugin-enhanced-readabilities/client', + 'vitepress', + '@nolebase/ui', + ], + }, + ssr: { + noExternal: [ + // If there are other packages that need to be processed by Vite, you can add them here. + '@nolebase/vitepress-plugin-enhanced-readabilities', + '@nolebase/ui', + ], + }, + }, + + markdown: { + math: true, + config(md) { + md.use(tabsMarkdownPlugin), + md.use(mathjax3), + md.use(footnote) + }, + theme: { + light: "github-light", + dark: "github-dark" + }, + }, + themeConfig: { + outline: 'deep', + // https://vitepress.dev/reference/default-theme-config + logo: 'REPLACE_ME_DOCUMENTER_VITEPRESS', + search: { + provider: 'local', + options: { + detailedView: true + } + }, + nav, + sidebar: 'REPLACE_ME_DOCUMENTER_VITEPRESS', + editLink: 'REPLACE_ME_DOCUMENTER_VITEPRESS', + socialLinks: [ + { icon: 'github', link: 'https://github.com/MilesCranmer/SymbolicRegression.jl' } + ], + footer: { + message: 'Made with Documenter.jl, VitePress and DocumenterVitepress.jl
', + copyright: `© Copyright ${new Date().getUTCFullYear()}.` + }, + } +}) diff --git a/docs/src/.vitepress/theme/index.ts b/docs/src/.vitepress/theme/index.ts new file mode 100644 index 000000000..0a542941d --- /dev/null +++ b/docs/src/.vitepress/theme/index.ts @@ -0,0 +1,50 @@ +// .vitepress/theme/index.ts +import { h } from 'vue' +import DefaultTheme from 'vitepress/theme' +import type { Theme as ThemeConfig } from 'vitepress' + +import { + NolebaseEnhancedReadabilitiesMenu, + NolebaseEnhancedReadabilitiesScreenMenu, +} from '@nolebase/vitepress-plugin-enhanced-readabilities/client' + +import VersionPicker from "../../components/VersionPicker.vue" +import AuthorBadge from '../../components/AuthorBadge.vue' +import Authors from '../../components/Authors.vue' + +import { enhanceAppWithTabs } from 'vitepress-plugin-tabs/client' + +import '@nolebase/vitepress-plugin-enhanced-readabilities/client/style.css' +import './style.css' + +export const Theme: ThemeConfig = { + extends: DefaultTheme, + Layout() { + return h(DefaultTheme.Layout, null, { + 'nav-bar-content-after': () => [ + h(NolebaseEnhancedReadabilitiesMenu), // Enhanced Readabilities menu + ], + // A enhanced readabilities menu for narrower screens (usually smaller than iPad Mini) + 'nav-screen-content-after': () => h(NolebaseEnhancedReadabilitiesScreenMenu), + }) + }, + enhanceApp({ app, router }) { + enhanceAppWithTabs(app); + app.component('VersionPicker', VersionPicker); + app.component('AuthorBadge', AuthorBadge) + app.component('Authors', Authors) + + // Auto-collapse MultitargetSRRegressor documentation + if (typeof window !== 'undefined') { + router.onAfterRouteChange = () => { + setTimeout(() => { + const details = document.querySelector('details:has(a[id*="MultitargetSRRegressor"])') + if (details?.hasAttribute('open')) { + details.removeAttribute('open') + } + }, 100) + } + } + } +} +export default Theme diff --git a/docs/src/.vitepress/theme/style.css b/docs/src/.vitepress/theme/style.css new 
file mode 100644 index 000000000..7665177d9 --- /dev/null +++ b/docs/src/.vitepress/theme/style.css @@ -0,0 +1,92 @@ +/** + * Custom PySR-inspired Theme for SymbolicRegression.jl + * Deep red/burgundy color scheme matching PySR branding + */ + +:root { + /* Primary brand colors - PySR red/burgundy theme */ + --vp-c-brand-1: #C03143; + --vp-c-brand-2: #9E2B3A; + --vp-c-brand-3: #7A1F2C; + --vp-c-brand-soft: rgba(192, 49, 67, 0.14); + + /* Interactive elements */ + --vp-c-brand-1-hover: #D6405B; + --vp-c-brand-2-hover: #B8354A; + --vp-c-brand-3-hover: #8F2438; + + /* Button styles */ + --vp-button-brand-border: transparent; + --vp-button-brand-text: var(--vp-c-white); + --vp-button-brand-bg: var(--vp-c-brand-1); + --vp-button-brand-hover-border: transparent; + --vp-button-brand-hover-text: var(--vp-c-white); + --vp-button-brand-hover-bg: var(--vp-c-brand-1-hover); + --vp-button-brand-active-border: transparent; + --vp-button-brand-active-text: var(--vp-c-white); + --vp-button-brand-active-bg: var(--vp-c-brand-2); + + /* Home hero customization */ + --vp-home-hero-name-color: transparent; + --vp-home-hero-name-background: linear-gradient(135deg, #C03143 0%, #D6405B 50%, #9E2B3A 100%); + + /* Code blocks accent */ + --vp-code-color: #C03143; + + /* Links */ + --vp-c-brand: var(--vp-c-brand-1); + --vp-c-brand-light: var(--vp-c-brand-2); + --vp-c-brand-lighter: var(--vp-c-brand-3); + --vp-c-brand-dark: #6A1A25; + --vp-c-brand-darker: #4F131C; +} + +/* Dark mode adjustments */ +html.dark { + --vp-c-brand-1: #E55470; + --vp-c-brand-2: #C03143; + --vp-c-brand-3: #9E2B3A; + --vp-c-brand-soft: rgba(229, 84, 112, 0.16); + + --vp-c-brand-1-hover: #EC6C85; + --vp-c-brand-2-hover: #D6405B; + --vp-c-brand-3-hover: #B8354A; + + /* Lighter burgundy for better contrast in dark mode */ + --vp-home-hero-name-background: linear-gradient(135deg, #E55470 0%, #C03143 50%, #9E2B3A 100%); +} + +/* Feature cards hover effect with burgundy accent */ +.VPFeature:hover { + border-color: 
var(--vp-c-brand-1); + box-shadow: 0 2px 8px rgba(192, 49, 67, 0.1); +} + +/* Custom styling for mathematical feel */ +.vp-doc h1, .vp-doc h2, .vp-doc h3 { + background: var(--vp-home-hero-name-background); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; + background-clip: text; +} + +.vp-doc .header-anchor { + -webkit-text-fill-color: var(--vp-c-brand-1) !important; +} + +/* Subtle gold glow for code blocks */ +.vp-code-group .tabs .tab.active { + color: var(--vp-c-brand-1); + border-bottom-color: var(--vp-c-brand-1); +} + +/* Add space after feature cards */ +.VPFeatures { + margin-bottom: 48px; +} + +/* Separator after feature cards */ +.VPFeatures + * { + border-top: 1px solid var(--vp-c-divider); + padding-top: 48px; +} diff --git a/docs/src/api.md b/docs/src/api.md index 6c0de1a54..b0125b7fb 100644 --- a/docs/src/api.md +++ b/docs/src/api.md @@ -13,6 +13,12 @@ MultitargetSRRegressor equation_search ``` +## Template Expressions + +```@docs +@template_spec +``` + ## Options ```@docs diff --git a/docs/src/assets/logo.png b/docs/src/assets/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..33c3b61d9bfa436b886cf788ed5550d1f4b5655b GIT binary patch literal 159937 zcmeFZ^;=Zk`vy9Ih=_oofV8wqiFB)UTXc60UDCXg0z)WW(kdO&rNqz(NDd6m&_hYb zS;PB&&UOBR^V65>LN8&@-g~X5?&p5)W%w&qIl|jiw;>P+p@O{h8wlivJOpxc{nkzJ zli{Z_RNyZWR~aqWw+&h*5EWxrM>_{sJ8P4N?&i)e)(-Z(Ts%*?_&FY0xw<;K zh;Va5|KBUP9Goq=wa=_X!Cmkj<+WWP5H2RnH9*Z#4aK zcd&_f-5K44-6`BA|>qq zr&MsBj}u0t^yMKOPs@`9$##(`;kQojFgV$)uAz150a6%q)-qm1Wk7KNfl%h2|EVXo zU*lJ1?jU*5oPU@HaBn-)VLh|UqKCInkEbg)t2>_|D3SXRpWXFK-Z>-RG05^8T9m|-v zxtbe`nWiEE^6X0d*U5A$$PcB&IJ{pel>ZO&r8PQ(IYoH{3%p^j%Bn{*Go!dv+DPES;2b70yQlzc8nifV~I{ zlTZDP_EC`b@*{`8i)oki#Zk;&v;Oxr!|A{k?Ex8?9Xw>ryQQ73kXVf|ge!!Egv9#Z zX*zef{NUgMW_<+?&lUJC{@abL*^{`igL4kpw};{Wr#C-Kf1-TUwPr681RmBt{Sd6d zFGUW;pb^|M$Wx)K?s2038BO|Hk$xjn%GUm9X|J_*~ z^KPx$|E@uq@a7v}0&DZo2*o{`5r(Bdn1?@+XC{f_yWUL`#g+{glC?VRw6`xvuZU8J 
z9+KkZs9c*bX&CLLAPpXv23Dd8mM(uM4ZN`eFYQaPrzRGWn2jr&1#cw|F3N1iXCXNW zbOBETrcbJC#jN1BOQlc&u+m!UE7DP+f35Tv_>m}%WzqlxDUO#tISOoUi!bJ#yIB5v z=SUom5eCd=^#A*4WANxmJ&S)gqdgCEZCFNXhcMWChiJ@8JmYv3GlFovh1q{M%_Oh} zrvL6-SMh64@ykzFqSyz?M*kWwDxZ^j&we*I=<|NAWzvkO~KX`ib`-;Qhl@{cs>=Qym3}+B$Q-VB^CvCI9xY6e6^B1akV7sNImp;#! zV|IE?SR4D*i#SNn?cO6CJQWmpKi4V;ZIA-EFe%{oZ#M;>RAs!F5nx%TRq8jSGc(tQ z>w4TRgRo`Zg#yn~%>>(1{uAQ^4I=-qafGci1!659RmiY%~!>BP10YtsokflX>| zH5(C@HnDSft&ky0#|$B34mT|Z3xLk~UE5$O_r|Wl(p?2JFRp}#z(7R4Zxx>jiN1>Y zWO^M)G=_d<6WDZ5)1`6=SL?L?0Sgg2P;=|K?(3rXtQ-ISlAg)+ ztMcK~ZxvGTFek4HYhk~B5vQdADZzYTRrJ~>@&t9Q$@x-YRSaaz_|muv*pJycN^g!~ zUOdDZ2UplKnTP?r>g!pMTXUm;Pdx;lG57}T)l>|j&)~IP0?&ATW)J!C(iVj>Pha1jJExF@+eLKJ4!O9m`r5^y{22B;`M!HzpXo&e{wHhtVmeC^>1vS7!; zt^S5F7Z76%llKoZ&lYZPi z5CmkA&@e1fkPO{{(ThO}Fww!}1%^z_mKtHURQ_2PF*%}DO$8 zA7#Rq2bZTfU@j-$;r(}2?c%M%`BKb`y%i84)B=efU2WKD{mr)Ur3Wh*2U7!E{)C;w zkoXw0)~DFHc)A{ zEUxb(Fq6df!$>D;0i=PvrebQWNq zFC}jWg3}HSY^u`Iht%33-PcGQsj-!$t6j03_UTS{?c1DQ5Fov$iFez1`za<}GiP85 z9ahWJbS4rRE*!WDOAVP$L}v7>E-bxM;*E$w^az3N7W@nt;xsxIvl-Q(aLbQ8 ziEUCR<3N_(?cHnM+HP8ZvE8y_T<^A-W$0Ocv;i`9HHdLWMyE~qbM6ra(%6lc1tqk# z_C3;&F-}DL{C6843HlNO@iRBeiLWn0-YW8X!QxxN-OIpMr?=XS2zUXHf|SCa+DH|xjss&@2MijsjKk@%oNUPL~Lf~?mPA_=cGndJ2^S& zg_2yz?Pu1U6W8dSCa*$SSTC7g#kgc#uV z<^1{LIXB!PNaeWBkh{yJg9Rn|M2PCH^`wrx@l{DF(Wiv~_ktAH4$FAN-0Y!(C-1Ib z(XdyCf8;dSj<*^!MAB=c5v4`W#l-+W6ZV!HnDIgLN@m-JZVxfa=!^A6RA`JF5zFop&F(LX?KT0qrY)~1A;e%(wqj2>W|iJSaYRUJ5V=3g#HXh#c&9w zGigWB`tisdoqwd}uBYv-$dL)7zC_8deWP^k-c|%5I3;59>_p+qL(8&%ePnL%dBWP= zaMovX%;LO+R!^j1<{iI&AW=b@vEU(dTJTS5f^Dvla85Gj?s@14R-DQ>(aihR5Qp8i zh6u`O+Xt|XxmqF#1O-xO@nTVksFq6tf!B(NFvzi5_c-w!VXQQKZqkcQ$B%LPT=8CF z@>v2sOv-WAr|A8s^s43Tfse+OTGQ4ZZlU(ce99_6F+e#vD%~OntxNkjgVQ7+c8&xR! 
z`q|hE;0vzddjQ%l?Qs2eY^3g9TO0X7|Gs8<-_YYc#vuLduJcQwk`J^7wAd=Twt9LT zz4<(UT1U|0KL<0<7%DXzmDKxRd@y`5e4_s4A!@|18W>eU)&C~nW_G-GLQQ)9Yz#6o z{b>lXCFk4h3tP3;$d(RXJ3&9{%SSbtlzH|3W>Iq{W+P z+u~~8Hz@6H_3~LeJSJ&1sCo+1PQ}o3WMD6)48(>p;lD@^6f=wZH->ZCjTSk>U7h+aoClqBiu zFH_%*(tN2eNr~4@2ef zb<|JX4UAY7FQb4h{%%s!tPrsn)|!>qhhFg1^h+aa?2tAkwY#J~5~-j6+?P!gjErKh zN&Bsl`e_n&uMWCDxb~YR)$fD1(=IY_y>p!Q9G4nPbE z@v{+;&X!l}pUaQ1M^)bYC--#3(aDJhFLP#3w}+5^)HawP);IL9!fwPQ0cxQ9kvkY` z29L*LHQ04v{bgvR>cRSSXs?{=QtSLhW5WSwKDSqTzqddTx^37lZsHi@AzL8zM&*A1 zW_fV<;CF{>nDipF*lBI5koS{4J#hdO5t-^q=X>)o*BSkSXtUVlDnpS5o>yHDa*rd8 zLz1lqoWrKjR=5oPaK`JsXlAEAf5;4ieWK+IvZlQjL?w0XQ(dDt>D1k&SWy2xWK&g} zrsO+o!&`Fo_|Vk}mel$wFCPiRgQ;gMqiX&0A|h`6`RlF5BJKm7xb;WdF9-c*)`ly2 zt=GOCzdh%;Fv5-K`Lq|%CkSAEwecFh=H9LQOLlmtNdqKkSwgBd3Rd6bO1v~2KH!Zo zo*@3c=F?6ufla-(v))U$G_LqtlucAEJmb8W>nKDPt{b=AntG8AcY1Nk8N7YBg zb|Wk&fBabZbBVoxSOP$M4b<{YYg`5A_k^CUu3?^|3^`0SDCPOXv<9qFh?5>g=GM_l zL=bJAjoDzJzE72(goFRP&X1h*p^{@&RJjXt_D?{#^=3pxin52EvHEv3(CgK^t^R~2 z`oaRmZ@%3-%$%t5mP++)QdqhDkodUndRtkC^37Rf>zE4wrF(XGxSe8esGzuMB1w^b}2^O zkBdRJpXBEkBFNcbi+u6yB@twFw?y~j>$-Jd@31O?V^@Q|rLOVg?YOtkw)=W-srrQl ziibdA8Hyw}E$$UDN@+25$U}VzZL3SoR^Sf1$l8D28`Uk8I#{dU*=3hxCD;zM z#DWmt38|S9pvZ7~Tn*hHPP06AoDqZ8ghyepVtzbX0{1gavmx))6o&H8fQE9PIP?w4o`mhd*vyP z+}HTEzc;-DP4QK&jOPRkH?DMFyGcH%+EdIA8TqKVx32QPmEr!uvW7PpXRpW2L|ud) zW;p)*-DIjJAv#Z>=l!!E0+L~ZxlHsn)JY`D+*n(Hi%awnYWg#!uJPhECAE9;Uwdl5 zJ~glDF1TUl6=TC8dM;wzFUoJ`=9^h9weOPg`ysF#C3nHE+>dK1B8arJdU?{FQVm&i}92$0!-5_%2M`>b5nGA&IFDB1tqZ?Jl_I$V58n3iq z=f|U*TYm9$C$RD#OE&sk$Zl4xrc&38!}&Uwq- z?6mK(hJdy@^^){f)b6F(9(UmQ+i1Y}XlJN}v+_^AlMqC-{RlmF+`_}e6J6$EsFg4_ z%@Ge1y@y^XuISWg;ni2Y(4@50|2 z8u#xlZ9aHp*mBY!Cb~gyH}Qjf+^t)2Fkd&6PVbqCSdGD5J@e*!lVWNEdm?hYAwYyyPwsJEHP(UyoW8L0)L@08F`iS>XY)>9_aH<-48$ z)F}gCpfU0SL%=QZ7<~c2ZnJ(+UjalLNFkXEsNaEY@94k(tl%J`6;k%H&^cb6A$q4y z$H*xj-x(rnml?HD%O?=hh@cTtc84ny(HAi^n6ly>s>jYe{*-Z1|0y;-&|LeE#`qh} zw`v9UwBL$LIx3nLSpGctg*tj|ho+kzp3F}wp2zwoQj0t)>!!fA!QdttZ*uuD;UV2Lj?doR 
zvA6R=3<(YfF9%(`ymh@f2;gq3pGSi?I+1<6l^O4ZNvZXBS7P-5flc>}Ru*I^egG2b zvF&*=Ah84|G?O=ppp>R9un8~uVg=hBT5L2}-6RB)>Yvwv07&u5GcRPLJ~#K&`l~}b z>WIYVjxi#Zj6rU;We~j>(okvwlDLz8Vv5v{^NpIkV;_OhP#a8_0PeT;TR3%UT?Z{8 zh5ROHko86mvupH4tc5w6Ns)DS%;P>5FKcN6g-0aJF7{NQmmU)XpD`%_&=Jm0SQH3I z13VI&J63<9c&z*W`q=;SotXdmJvOiqwpgXbPfGhsab&0o6w)%Yw$R3=RsICKw2Nu1F#$C-9UZCXOPtL;o7kUJ-Pal$kU_C z4J{E5Ty%>7I!SI(Ct)<32FvQJ!wp7+Q;J2rH0(BPTA<>YS$7&)nxr2ec}z;IqN2@h zQGqS&FW>i`KCg-lpO$lpjh4AgyXeGz^X5Xiuitp^u&FP-dgtputlb>!Pr7xz&r=l` zhaex84jR`Wto$KVo>}L!n}y$7tL>;>5<+U{vkI!Aa+;A<4}RH9{^9>qVbc31gFCaw zaQ0ZgibgnHn<=y1@~n(Ab(_{(cGl8ZCbTkR$RnsHcH+zr1Vr3*ms1SZl}0FrlbbpK zDAQ&aG2or=7iZCQl{A6*jLRbiTDJB5<&u{BDaCeL<&?eE-Rm*kL8)Aas=5cX7x&Sg zrL!_6=1fvXv%b~>ARK&8*;tx${RTc4fz~7oWM%$V(h*_hsYXj2wWgyR0W~jkCY58m z9%_uc=`{L#EE8F9y#LhFbb@DaGAI1qhDa98O^(oesTyXWfBL(@w#Di_&b%f~T8yAz zo@um(zn=BaPJ|7htT9}<#ty|CAiPOu(yZzC(6cRF^WP^1wjK4gnmm1vXZ24Bx1>NM zI=y7`A2wdnIZRU;u;>W2)!NM>SA`4$NDc)g)1Km~?7>XlNK#Ebjn#Xzl8A^6ks);^ z{=cIflVvdlNbi5Z%Xw=+b&1+7M&G6yYA_tBYey5o#ADB_nmL034H0h+A8U|Ntr`bT~LPao;`4WjwVi=ec8 z?KBNfuI}n4UC;eCP0xd<*}XA2bKnB}oG&7Q!>^pZ;-(f3bP3h_eNf&-(l#GmI$NGE zQ_jT0k9AyrZTA93fUC%l)7NOazi?WjbjkmW^OxCAcCMZc2zlJn_i`1_G(}00S*e9$n!F&~mZSz}aafkhNjo(3epyYG+KT z?havosvK%jPWlur+`--@r=*5;^9ZwibD5fPzJ8hAqKQ+dKIbkTUqL`)R9?Rn8AY-;|> z@TY4VM-;JHj^|1*y`AT=TZjLGi`GfTRX0lM?PF8p`GxeamG1{-F>?~{0{W+Z2QEuRV$e{#p3d^4JbFbRAb>?1*lA90j-yZs*Nf8oF zXUFyKJjL)`TurCzEgnFRixIAB3zXG-|HprjBMShb1GL1W{Am8FBg*Mm7s0nBJ^`Sq zWaj0hhP!3+$};Mugmhx(p1!SfITt_Qyp-H?oI^y?^M?bDO=sF;e=srWR0gz~V3q*z zK?-1EEOEH8(h22a1xU4JD!g{Rh_(xVq4qT%Wcr^x`tWTb!9YyaS)<63UH**XQP+E% z+A&wyVQU3Pv$q_BZ~ft-QSdlLJTl-E0u0QC2QDMXia8dp zadfv==+L3Pmk`jkkEvWM-*vg+ZZsT-l}X8_o{Ez~SaP@7fcujh2VkCH+t744ynPHf zefN2VcQFv;)3Lakt3D=T$!>m%>GmpOL+c-B65g#=&a$dY3zb|2pm+i{;wL+AibH`D z4VsTO6L-`t)YBYE;8SH!m-UM2gEwIgxG3_$)0EdZ@ZT&A>=fzAZ8DZCPmK@E$V)B) zC@ijYm*o+wHya;3>gonF6G=OfNPw4{u;0O|>X^9elnwrYYBf`$$ng+|CG3MD{*QWB zN`JYf8i*+kDz~}mt=nw$<3cm@ztNBsKP17%6{c+E0;6t@tq!srHc+PFc!fyGt+Vy9 zEK!PdH{rzS#<3K(gVQ);i> 
z5viotPOn&7jNst(4`6yjYlV?^To^1S{4 z&Y^dN6>9UnYMJ%$Ht-k!YxbEr6O^9^Uue3XrL$1FZcQeqZNPbsi!NO^d=HHL;ZC4N zil=Hin2%=v-JJPBZ`HubX_+qgdC*Th@s;T<|Bsn}dj(&gPHdQ&Sw6L2crZ}sdBgw7 zF&^F_U8HZ)u@huf|6QD>tw>l>-apb1z;Q(4A$iktnuIIDgkCo_94y9>B!1%EQ|7gz;F;fpj@cV~KVU*Ni2~wWLJyz7NB#a` znMgkV4z@VAKN<8=)m7f6f1b)6I@t!_*Q{KkE&7zK+XC?o7P&P>k4HnQMOH zg_HEcu3 zNPA*FEy9u;70;G0(@PazpZk?r%^M~SKUXC17o`2tRL+??w-kDOs&eHvjUd|O3DV_0 zqw!Gl3Bb&wK;ANV?G(w$4_ge_kVVBRpRKCNV|MO| zjj5m>?5@6X+$6)N$^mNG?Tc~e(PCjUvpl)pSSDf~J5Fj1`>aDp&gwx5sLA&y#JiWa z`|n^h(BvMH@K-Mw`^5u28smdNTT4w~xROF(zfe(G8reLHwiSUq5zWE5C#$xy4Qi41 zbuD5#tFA4#si@gqXW>&d8dKjuP2dw#ctEMh^NF70Qo=DBAgDQzg%<(5WEJP7ZDpq| zOVPkXxYvFtbJWki+V-8m)w4t7D-90lIrrmE{~b*ga_M{Z(7jOJ-J#g%tW&>#?^j^hjNmpGSa!Dpy@+n`tAo?(ScUNg89BAEK%x$ie#-Uszi7?4UBNi z9Fu@jr7Lc$;>*uJ&lJcchmV*ytWAW~{^OCToU^xc37QAt9?sUmK(TATbOJeq8&~nS zQ!{iSe{OWh-*9)HYNfF*rr);uM{Pf_$TPb?4+k22?&E~~&2u?Yov&z%GKi4T_*7d` zvyf|=NTlbv{NR*$tF4hTLhG5OOd;WL^nE@&c4A{~@*kr=`h1pg)@>wxkNxAk%D*zB z+d~Ky-vFPL^CFzMdB97XEB@`faM$9isP+4X6icL^2tJ z7IwJ-y1(y)c7|qpA#NiQP{plt$;k;c2%!Njz<235k!3N7D-t-rO{(S0L{)+!s4qOt zM!U}WD^DgXPa0ocI=FAo(tUEi5oSR(?CmL(x)INmpc!|tArJt%*7l|H<$D(Z7T>!b z3@_;VmDRE%3A9gTkT=1Muct%PTS~j8Xl4LVg1;y3ezgApP>W8 zR#IC-Kg2I;6U9|zR>Tdb=`|cG8_N+=4NF8x^mfi{{0a`M@nU`>A*}%uf@vll+tHKZ zdF)T5wxD7)BeWZh8Qche@C;72oIMXY*t;Fqb(@p`O#$t)X699?ltLsCdjfHfOCW~g zCe8c>KWV;^W-g*s@j%e&4y88byRA}_lVALs(~Wp@_Y9Y8F1J6@o_U_t9VK1}93>krqJuZ(?@ zU+bv$&rcZocYc!u{bk2anR&R}6Zxe_po`EA)C4sy8u0Tgv*-i}KvT%E6e}ze;rel1 z{yikzVZYLH|2_0)#HM&{fy$n_rRyBQC%SC80t3j<7QaE@qIt=8iHpE8=i!Qg3gCzj zuOqA(L>OlyLtG-D7C5{Pwq#2$kuTGw@V;v|8~s)A^)o~C_e77CyS1ruAo8Y3Xl2ysCqmZ0{ixfuC5muuy)iA5?U1HDG2UBaI;#O#vvJn6J{53ZKeKAKPfx% z?(_2ld85o|`=*9xmz@#EeQkm~2b+OIXW|w_;~l6Ym-%)cGq=7QMzhH(OC~j zgRUUWP!YKbN&&B@)|A#@P~>c6NcP*Apg&wuMd>H1X0oLq?gY)TP;5DybN=_lPQNC- zGo;g$24DHOys>6vc;tbA&0WD+55iCA2WlGdKGT&|!6IUaUrTh~X5=U0lr%b^FPLHz zSBf(Qk{*vh$)tfH1L$l+aDhbv1X&&5Rh=S8-rRm5Q|eNC_)3{x@{MC{i4qv5pLF)m zn)U>p0Yg4#~6U0Nro6+{zpHSX!1Kb8P2-9&Xq7r`L#%K?bq@Uvks9z3gRzGUVp 
z1?)BuVhl^HB^jRZv9ziM5W(y8-=zGoy9o-m@<3NONZa&9DIEa$Mx889CD3vTbw;PO zYhur4OUcRwkh7trtH&o22+2X8C9&aN5^i0+Qu>cHj0QFuF7C9gF?ZCese*CALpDG| zvg|PTM@z&C3{Bkqg|hV`%(*I)C^~k89%#*e;<3xE;kW1?{!1hBDr&3J=_L^uFIbKz zh(|Vi-oRvd-_QdPOk`Gm%uoA~hp-#lqb+!XNN@f`eGdj8Q2}&z1n}>guJqVEneS|K zM?h6`2Jnc)Yi;BAm~kxroNaz(mE4cN{9?Qt^l7}u*diA0CL{I^w={AeSV7&*M58x; zKQ81oY*=flT`JlI10`9z-p*=WF2xhe;6wRgilES;AEphViqO1ja?pr)#-vR_e0kgUt5wNMa_N zd*==fk~njgLSMnTh?7U|<1{M^KW;!9o^2V2<4e`!Ma;xL6wPUl2uV)^4g7$Z$0L1j zH=@!;x14=hMhBc)*wLQmc!vJ34tY;l_~M+}ZpN5Q^^I<}DO@q2j;u1&S>S~4i!w^a zFF)GwD4Ac=R)hS0+iz$!s*wMHxL?}U!-z7c55#>>4$=;(cqz+B@9wq@?lX3 z)P_(SVjjRzP5N6I7NSIkgDv%X)V41;Yf#hupz6C!PHOIT*tQHDZF4OA;^hJ^0?*0@ zdGrH@8j(K(Y6QC5jYO_|=`7m(V}fsdzq0D6-Vw^7&pbQj1WTtlUNZGdjVQtv-e}B% zyxW`}H*uHW_n-Y?PCSzFKH9?|t$!+y8cZZuoa>`J8Z6-02=eNgiZA6HWp?P(QWLm= zP2q;>Vxih?|0jSoNWFKTnF|O2)xZFt`Q{_g)`_P!uexd2UnGIXbMYvzYTXwxzx52Z z@w>*>-;e5CoEWcJx zp7wMRgU7Z{4!5Q+m#J4pYc*ZXy9%PTH>N5Hs-bUPYD2^kk|$-KyL#gC=!=$qeB(`$ zG$rw_Mn~Bf(zZeJPCGT)qg`if9JzC8T{1!_G2;3bBw)Z3)OBvf|~9}|t! 
zU5{77E^~xDc5opDzKY6b-`@koV%a&{YuJ40AWJO5E#f)~xS*)oS}m>G`8wuUxB;m5 zEnFA)y#kyP#T;zw3%o85JXXwy?zw<@NNFR!a1@Xj_2_F(BUrT;A_>pgLG6#n)P56L zfN1XRus-c`O)&E?x?-TB#;86yFwvx{J7Ff|guj23PUiidw@QYlnO;ogB2J<>k?`TdhboKUwi=aX&(6!@_N zsQ0y?v9N?d_c=sWeH*PwO1XW)&r07vn5$9^FNZn|*KO3NO``4muHs20{6`ZZ=rH5n ziblh!{*~&wd37doa=?rOdw#OCy!b$-n~;H|a3_89wy}5^emceU!Mwq3lTx3Ii8B|A z`{r%u&TB9VWd|b$tVZleo^+Wn&D9w_A8{~g*5V(aP`JD=nW!%avz2h^;E|Nvvu=|m z>_i;_##!>r9W9v!hux6=_-ri-02RX3IXE@RI-NdMGy( z(d?`T7AJi%>>6;mO+Rwg1A5e$1ZNxGQAT!2#zNU<&#m#3WeR$8$3f$uv{$a9w|<+9 z-MXuHK%4dMKu+;Q+mRg36t<{b93aM$klN+xiv^5-J>qy z>(PTXH8?yVxx?`k=}Nu}hJNL~izPdX$Q6VR2l_r0yrFts@L)nTixFYMXfCG_8o8Q~ zwrFi&_il3nju>oF11(g^qI2zcDhQ&wt<9imv@-$fVuXHSaC(+HA8mWm#g?S;E^get zltyv-%?n1h^~XkqYj?LMv!nAkRtjx!p&4n>rf7&uR4$|AOKMC!vLRQ8Bh+n(k@<5exvvLi+eiX$NbQd@b=w6xOJHdt_Q zi8-24m9Ybq6`$WcEzU22J5qy|x&_zISVYO2LKt=wWKWIsS`cCYO!@mqJDepVM>?&d z_2U(G=gptVrL>&%9R@uyX$BLUJL$i)zm>Ulku+KBO^fFCMpeS>KaQTw)E{Aa}!PR4+4Etg86HZOo3?hjiJZp za87ec;I?n0;XEML)Pzq?Y>@&~*(}O~%`@5bhK;Kt{cD8&mT)Np{so=hAXY_@Xyt&9#qHPvvF*a zAwa{{z7AzSwwe)ZcHI&@-Xol4X5!rcj7pHE zyDAsFY9-D%XZ7beW65*L$UZ)KnUQu$u~N(>nRljQk`pKh&^zrlbQ;6(HCQ`_`7T}M zG6trAiV!uwN*f4SPQAtN#qD?Uwo$!2q9?J^Z2O~{&x`M%fjT~ypn)?mv40>+uTT{R z>MtsY7x8A+h_!0}%{fXSzx1<;h)OG(#h$$}tb%7fD1odBiyrG|1FO5-J9W^mb+?az z3VyqMhfOgf>$s=csLlxRfXJ~gXQ*f0yg$fFHqgJWv7GlC6jCSq(Jr zC|4ID0?pH>1(G6*TtSFq)#m0G#HA!C0=3|p@9S<L9Uo7O%lehrTuU zR+LTw976~Ogmz+}A!cn{&gnlq51lPq>SB%zQ{gFjnFcV3D|&`3b5`zaIZ41z%fD<) z>=(I~L1rF3zcSbHIDGtcm%F+Vgkk_;v{R5VJ3sLw;!7bKLPCot`Xp!HN2(^Lzn@Df z-R){q+u1_+wMq&4MEqe}c*MIZ2a+NwxXk=zAmu+oJXw5-+3u`;OW1 z3W=rtjWbstj;Y;qep!Sl|5!XvMlUY%ccDdF8KKIib&qL24(_FeI;U`6`u^T1LAH0J#V?5c`U02h5 z72n5Al!9r34UnlFXa6IussltVvU9+t+dm{LAx*Y>v;#Cf&GNikCSJk>mwGMFvV(8^ z=Ci#K5i`;?_E7~Ob_KsfJ1ivuzX#2k_aw7cJQ-U8j_8UW@}~4}G;cSa?=Q_Hs0|J@ z<-iFFAMvcq8e#lviLD8oujT=r;^17<$6~{5go=8sWkvuY#Kbog0NA=cNVTgG zc5G+pIvN%nOr0@n9IRH1;l(z|(7T1h^!q+1?||A-*KJfjXrtaN&vO4n!7$slT99^z zC{aKH0S9XJ-7Q`J)}?)%;>!tV#cjwXIPowvXipL=1YmPB8Cr%V9-*)Vgv7b|d9s!< 
zK&Ao*ivPb6<4%XTZMxHkpA5WHeBflRYm>r_D)77&N6v57I`Uqmeqh?DX%4)FUf-rgsVAhwJF+UKRdwpkDkjCX$Is4~s`gpw+u8lRYFFG_PDV0(xCDI}J<2R}~6e>i`pSk}MUec4~ zI$t90kQ2Uj4u+)RYNvsm1&ea6`c~bmPECL33-yG~Ne3u>Bp3vPpxXV_ z#h!(-L^R03M)l>-1Mfc;lci#zSM}g-I!!dYXXkg;g+D8x^f8=dZ1M`U)QK~@uB?%a z3siwhU!S!pho=@@T9@w**#T!{KPu)t+zjCZfUo(ThE|EWI9Q7^Sc})3@WW96kfaUl zKBh6dRIv4=-GMtzG`L9Wd%OR{Of7Fq=F80fY+CzM%=AI-z;b^b%8}9})~Gm-9uXNLdX&6jC>dN%80ao(hr5KJqPXR+29 zecZ2f6%CmCz>S zCHArI^DkP#0WGm2@cQkIAcKy!Dm|}XerpJLWK_%RF zw;Nucxpxo>biz#l=9~l6T960kb&H^jt!%YawzNir)Mn@R7}SSfWfGPlYTqXN1~Yn5 zt@Ayr+?`sVnPx-}mq713AL3N=EH_b*hGWT7*Ea>=nDmT$w~1qe*XyMkTh8?@e8tkq zbS3gjvZ_ZbyRE%%IT-C-QzHr%AU`R5rLh*o1jgFo?j36WFqpmw&EH*u&gd18kU*1r zY;-CIWK6*y&I-R4e#2AsCs6Hx`TB@Ckf97EL>de58m}ykwYla(LTl@X7IIIl14{@r zju$Iu_3jSbPfd2k&%J#d3{K6=zKfB!J!U`(P!mRp!F<)pFfvlVow}!SBO9o6!BuV0*cpl>C37+*^WU`1(X23+Y{bL zlnltjg^&iea_=>2j*p(Qc=eu(OCvcbsr9rSRb$Kvp@@;E8(W(L8<$bAzjB zbM~4Hq!LX2FjqV9-TUUJ3zQY$%wo7hLi12AKW0dcKMfc(v7JeYX4e_PTagV>m`OxV z=~eY8eOm>Gm)mwP3lkSAgun$H{*b(Dz;A*WqGTe8eNhAf@Y8gxg0iv2?k3O&LLwVSO?xtNoGhm{q z=L3tlvz>EiYFRu9(;GZ)msamsczQA}-{OmBR*~D0{rH*gqJAXL((-1;P&A8+##8^! 
z?A6_RwzqGj?Hj)O2!FHwhM9)eAFR@FLd%N4`Hy_P*m%y8jW^|{^rd3Ep3M*>LYBqYQsyBN${G>QiGysbAL1%cS67JLo$6!KPal@e(?3Az8A3qe;@38Q6wdaoxrDGKEqBC9=6vL za=Ihv=jVK2rLSi8)TXw|aXLqW{P813I7ALvQuxiZcWSH8&jo8E(OYU=&74Kf%Yq7J zAQI=oP5A;HD4tWO&}izjWl^Zr2sbzx(ve8{@mW2>wF-?tMR*6u8NuP!uy#o@=3yJq z?Og=^Sk-E{2eflbYA2MFYqTearoo`va>DeMW`;N6c$z}W_}0W&XC*TJ9DF9n#q;dIh-`^8 z=(4YVO8_ZK_ht;Ge$xh_!$qzz3-W^R4}Y2O2-k|LZr4ZR@r-xXE9Q!d-!s9HG1sl3 z#$)pz?*F;<@6C;n1pB zhEq^lCm(G7Vb^(+wyKc|1XDu(6GHY6JlHM&QsV=$v65%-EpVD8{65nU!XJJ5Cv?1t z&Ha1*N?%JvX>v-Hr@{*Yy)ivLrUwi606AJT@SD|&P6JSwyKL#-d*Ctp9s-D<>Or{< zSQ7`3olQD$eW{-EzO*~D3yGqUj`o{*z`w10O1O6Kf~L1x_dfe?pReDK(4j;$7&T|o zMd!;W?2Yj?#qZBcV0+@$M`ZC!@!*JZxD;-9RXr#_M}hRI10>vby(?L*HS+rfyBie# z&eMp3)3=nvXLndLJ#a9Rns@#-5y_0PWK6BE3uVW)wK_zVkY+#Eh3#f*{(j~yIDWe} zmW3yJv9O+c zKRD|7YwWLRHbNa}$nWjBXPo*7d|Ue#*}TfZI^!q2mW+HvS*jOa#A>ahFzlnQXe!0j zP^0nGp_7q3o;$)qD4wCjStkJt1!O7KqUBXoAcEFG1SNY`%7aryq};MZVu_hEN4F%s z1u}uSW6P(c4jd6ybM_!2m`6WX~h{hn+}aHCIl_ z{2xt*sp{y6f!fc5sJ8O>vl!Q=O zLa{_aNuPK)@|-q`VQVf7hc_S<=%ap=DoQWi0OE$0o9D2EPhxqrl+K3`iK8em;92y^6ro`okA}|N!F{dCefox#}(3*mP(=e zmohR6eNCG0;sk%X-ck?)|4RrfFp-4OW5?<>j$KBeM3KDsg4TqU1@Gj0$1 zfLrD+ZAn&^e$Q5?momwwsRdZ!{0oLm-qHt{yP@?8bjX?8afRG-^@A!mzGO8Aw%ot` z1O~{ewYfsh+i-plmAMMrwgPOC`L|fnLWBj&v8`M`M*knCzA`MzwQCy~0g)0=8c{${ zQcy_&gOpNG5J^etmM()386A+6FbL`H8l+pgyGua2zjg1upZECwdN?;%tX}6jFDJ`q zCGQ4pxb?xzPacZzOv{dqC(3e=X*^Za8@7F~=c-y({BN5p#05-5D+CGFc=;=n}q0ykv zWK3eFDBXK@`fJr{oU-K|yXx%!XG1Z+hZCQb;s%pivl@^0}hRh1{nZk+b^66xIHc+dwgO1L#L&2|2 zu^f$dOuDhVcNm;AAY6rE;ekRSkL^&CV7Nh;7ct3liD*a`|ApWukK1qai?L+x3-q*W zj)gP#JHgz0yQa>5U&d*Rj)upTBd?4{77a3}JP`LcNRQl%51g>zy2gYgfM-J=-+zqsIOhlARZgfwE~28L;vL2KPE{r9K3# zC62Whk2-Y5Fja6@{whBMTN&&|lYSX(#*u=NAlOvzKqe5aLOOD}CETSyUDp_6_SD;? 
za>PmU+~QOl_l&6^+|8F~7ctiINdsGaq=A{+haVdZT}n$@$@ie zPL5Z3V%jF@E@yn(Cw@SqZAT_F;&~A|8ZbU3N>-S)pVA@AOhhua6LXQc|5J4M{f`o( z3ot9{Fs60dh%?slyu3Cr>sU>CZG+J*Gd!N7h&d+T|{8Ll@=d+wM}?6zBGFiG#VzbdOK>_CK5*x^qF7@00dw(MS(`17t`4te*-XJq1{2#rXiCH9l{D%@z}!vA&Bm5vC^MP2)-8_n)>^ zN8T~nBz_NYeI#8#QoKlyG19HYA<1a`fK0|f%VU7d^gI0&F8%_@j9(pbT>3DaY!ISm zz(N(FaWXy06gh75yzAMz5c8?VFH8%E_C`A2 z`LcT(n8O0Apht!Q8gVW`A@PzmC!=8lAaocWp-C&jD>mDWm7|9+PWIY_If81-9@-_t z5MZ$;bP;cA|w1OK!MiiEopCY%Lvj zq=o@Vmuq_i+d4npuD<)UwpSFs^A$P(B%*1OHL~O;KmY?>K1_66Gjt(^(F#x``N4My z7hC0ZjAs#U9pK7t(;L0r=Vn( zX6VhhfRKr8(F3)2_Sk0at<^w5c=EXWYr?EUawzo=R@!jgO#}T__@JyATVmeoH1i>* z5%vvQfH!GhYaV@@a!_^J?+_DRYEGS@hlLCM^`|*jb~LM&y#F=nXzX>p{e!(Y?}Ov? zG^Kg4$Vz%dC)=AkDQ>?${#cx}yA0K-8X5AbgX>iC9f}__9ot&lvAI^Fj_uZV?)^@X zWIO!K$TV)48@(l1Ypo$OL)lQs!`Lv30A(PzU?1m?Q`Qlq6q)Y83vbt{e*ezGF#M|T z#WY4tVFJXabKnW8P?gqTivhJWCsssr12_(jW>QYkne8vFXh_Nekh-GC8@{g!uQVnW zp0ZM0Nv0UyDR_2I^{Imo(_vwr=Qy>xrDpR|2G_H)2gwcfd0t^V-!psr@K1O9ceh&Z zOHGZ{RLgFbKH4T`KEP0udw4Mmjfq83%#*URilI0z`~sM?BDsx|-$e;^p}(4ARS2A} zU5okeM)}iD+)OK&_B0)-btsw3FuZ4M+j2JVqh|E?afTge;X=&x-(! 
z*gyM;^?mBXVcJMU55-$A`!yQ`hIQ&OpVM7gHJ2vl_Ucbt!e-`t7lp=+>;`>c6Tmh& zG1+9wxo|1L;ET|WzOpvi%9P;i(iz*it;PQ z2vw3$$&&Xc>tJlu< zikm@e^ws64n^w@K`~$(>HzoVzq-5K*qBW!k=_5TTnS2u}4}z{MXB;#@t8OA%1;uz7 zh~Hjg$CbcF?CNbkMHn)6kCzJ72Mr80sUy&l*r$e>#xqz4pk*O?Ii!rre|Ck^p8f}r z3)LL%&zE^m2jla-&=MS^W1{P_phYq$P`^a2hD!;1s^8-^o;rMjL9kfph8aSPlpL!w zq&#mPB%h`>u56QUpZr;=zvL|orL+B;MDlHuHD-6!6sy~I291NLyN;m#sZ7|B+hTndd185Pl) zHSDwS5Ys5yJD1!IZ6xHoC0KtS_Qv?Plb*5@ zMV{#+r7LeGlU<205OlwP92#l+r#Gf|%~GJde9{Ea?S%2tMcp`Ps`r`ngE=-_D$3rU zA&Ydl|55Dc{P|1T)Pzf=xB;-Kf%|pW<&WigM$cP6a)*aTbe#>=NbD+@i=_E6 z?6VBAsy9@hBPFFQXPteEX^+Ns7{i=rOZV$lZi>~RzP+|4)6J4&h5nn4J(qD{PIVza zvk(p0*0b$S(N~s3NgW~z| zlS^aXJq=&7=n-q%?2t7HcTC$iuL!QhH#|=!+M?3j2u2$|mmix_XT*CycV%|nlZQmI z>pNRYrdX@SsS6(fm($qHWGx9ZpNHPhN>$Mrc}g@QkZ5_}ZWRita_tO(f*6d$X?S#5 zSu*0@5E4I7(mlSKkD<~s+jn*fuXi3yVZ?XFJ_ z6;6`#6$F1Dh9+4Fp{Fli#|X^+*p)OH+mc=xEA@Z0u$A=yvKCPMKl*cA4w`vJHD=>_ zl%*+9XnP8Yi)+edifQmFMYtVM$FTvl;hb#z>D9EN45Ov8k9nuRbv+9Co(o7-7IiRr z;A+G=Ga34QNQ9Zd$&c^&aL@y)K)qCmFj-vTsLfkD1=QD_w#lIf$uT!oJ&@@ z_!#r{rhA>D*Fo!P+}~q`vrD&BXvVI29J8Dz!o4+kARPv6UxePZBVB`nANATeg; zc}pN>HK9|G<4a?59=A&fQn{C=P!>n19^-l?EvGtYP>!)ihMj^l)_f zV_C--(=&}``;4`Pc1-@nZe7K>uMB)aFh)P7b3hSube>%Ilv$hK&AtTyiQnHmV_gG) z+8&I!Br-@&m3@?K1w_HHSs3fx{A|qvD>`PI=NCMytPX8;{DV50U#zSv_{_Qc)qM^U z+*iqG#FazEMbSrY70}91Qixmu>xIEGDt~3fJv(VC?)mcfwft9CR2^fSPHy1PGnDs2 zz3OTHHQDI#LbJKEJ^g~&j(;AGx$TIXRES3KSQOzZIVfl+?=PJv9-b$4aboh%6fZl! 
zcVuK;JkLoGI|%69)6Z3CL(Zb@tY|3F)sbO&+?a>y}^19}J?Itu}96x3`-1^G}j__#Pk#G$$@< zSL_q!U+AXaPVhWUtQjEKoKrZUHMzhW7UP!|oBNq&n?R$p1Pcqq70{@o06 z!00_BTM>7`D`8F3EI(4_vh0>uJw@6vy+F(HG^UFe;d3PLg50)z-S2~Imdo);SUd`R zmdJ=x9gWLg{T%?$l8G9>Vy}y*tgLurJlJ`Wm3oYC<5M?n>}?pIarct=P;L-CV;{Q6^PNV}WWc z^-!VMnQ_MALGJaAO4lmKW0O)>Z;#SP2TyeGeV{2Y`OMqGq+$PFlSwvdd};x zOJ%>xbaSwrL$IcnvHuE}_!oqDXKd0h?`;-mJF_Sn9TjW%cfj7`0jDMJzz`?#xiwr} zpO%`EE^OPptMSw|^4-md z8x{F6gcyc{ZglI+_mg1hwF)K>gYtxZy>{<^@P0N-QzR&qOD>@>}74C zK0^x)xSf>&=Vr~&XRPb`5Tb;S$&~>G?ocoQch0X%motC1$78uEcFFTt2<*8(z)hw_ z2f~?-BYRga^4s*vbrapT;p3bZ`{@0yh^B&?N82FLd-ontLs4;*1E_qB)K(k@&{wur zA|)yrwZ7`DT|T%p=G?@I-~L>TtQ{$ww0u4;8Ljh1oH7 z0p{O(`s&C|-LlBq-^W`^^WL2gmO+H*8?Fz;XSZ$4*(FR!D-gqW|P<9^0W zW1#C;8G2rw$>R>jdU9)_U%WK>t%>ZZTi(j*x+P^$^uVpSzOyzox zs~4_yYiAbrlnhNZ7o_ZsMy{5uk(W!}zOh&c#>%vI>7Eo@2{`LATs~Zp9JdeWM5j5V zR5%T{&%f3P*jgA4_AKKvvwZnMH;XZSOSwa#MeavtX=X38GfW4pjO#R^+sf!XJ2jEM z`Ie#$+4B}w--=$g2^s%lvP=9v)vN?JUj3q7T3(V@QkVtu8XHNWphrIl+Tz)0_EaHd z%77Yln^{nBzSuwI2h`W6nHM+Ope=ok0zMMXKcDJ z1)7N%ywmdyY`q(0KYKiK`$QL}f5*6P%36*TaDM@E0clAM1HQC&gQ@5zst@>25C5Dg z=Z7tiJ(K(tz*ae;zxYUBkRL46tFB;A$AIk`wt_V4iVx8Io6B9U?<#RmUYM;u866W0 zOFe$dDEf>kv>`LgLci|U?bd5N$uP~bGEhv6)l!<0(bax`gwFpyY|UaEh|t%#)o7Vk z-igrB(y~cZpxk$WnjTt78=l3Pq%LuQ~wJl1Wu-d(a0l<#(uPr&A| zHsEVfTSQpvH^t2Iw?%#xoR0P4==$>$?C^^FT}$*+`Ma15!EOnUFeru#hFvU1YN#*( z5jbcltL`PG9y}6$^WACL8hMQP1S4>^e4;wXr$!o|+HE;kci3W*t=|?CX&lOaJ@|&5 z6DQNshoc3)7>FyJnNZv;9AJvv7}m*tZ@Ls&=}>=Jth?e!;%Q+4kYSC}KR&xtjT028 z;>~XiB?ssYKqTf$-dVX-W@yry2b_KBI;9wA-RK(kEHCWbZLJDc|J6CIurKPLh0#H) z|3?zW@W$${{6zJ?B+*i6X=n&)fo%GB0?{Gmp!w#Zh8LMn$4*XGDoYZVEQ>S=7I6Q2 zX6w5nLi=2hDV)u++(5VG{W|q2f<~4lf=w;J=krr3?A(cdmdg~G&HFC%wv6`*;RYJ_ zkCDLJfZC5Bk3zntz9dv1*9>wn5HrzWZTd|r4iNT%Ct9aP&fs&=Us&qU{p9AFC^ z|NJ3!VsMDxA;v4pfVG zlYlEFUJ5p20h?LF@doZrT`Bb5XoY$d$m2UwVBimo&MO#JI=3m?crd5`B8T`Q7 zH>Y@l(PR{SEW_Hm*W#O_SGf=Z;+FQf->DxdM7|N|L$G?yYocSsbzj^l!DI>m#OK&^ z&1>uM`-3lums_n-pJ?xJ0l$e`+FXcX*~`#)+_2T}jDfU79ye~^FpB{z)pY_^GH*jU 
z)pfuzJm9=nU$;NOP;W<9G1YZpx2?s8=5CyAdJHg!akP>2^lEJBE9j&t&%EZ62@Q#+ ztnf?^3VA?Cwpl&Q%nzCA&vfAl(s*)_0U!=u{`*J4L74FeZZ6Ru1whSaoTkRgaRC-; zRTrTposh`yoqIc2=#0DTui=G@b*-t##Cjcc??L*8lqmddTBi@=kG)A&J!N1(0q87< zIvD>c#124ep2c&`c;_4`uMM$b;Q^g)C$Ea*8h*JL*g`?*mjO17G<*kTf{WX&iSD_3 zj$z@!58il{sO9@GM`DVm8`?rIXy-N>$2qrVT4}*z-^h~a|5A#uADz!Mg7T(Pd>;Xl zgd{V&zC*c_%V^ZKdoDkKdEp*GT*?+h*o+M{)$Q*b5b@a|9Y-CQ40EU@ZXkkbXJjO5 z8=p@AI6w%tXg!|q4nh}`=m*DU5aCJdwxLb++NSs4R0^N=0+_wisZ>@!1IjgRG<1|ozzi|b*b zGkCj;J@kPz;c;K(?jU6BB%j|1;>HKe<=vl{c^JrQ;|@at5;sqDp^-@ud%f$(_zSB> zu)Zsoe4)DKRQ%c<8Gr+*h`x!5t_%~T>PymZ7*9ML#>T2OCutzmGgjb$4tOSLY6qL= z+Mn?IjBf0P_GJ$yseN_%yqJ}|LnV%Y44C*yTf3pPj4~ZpOLDo zYYAeZ0PDsKMZ6PcXaZ|a-P^REY0RDJzE!n~bOC zL|amRCZN3TZy9GTUbFjQ7-1>uyexO^f+JkzfH5XPKg(QD84_jP+@OFKKOSS(oeG)58E zmJM~Epjy(5%a~$#GGPPd;6n@m8zHaw_!pk-23mC{WWDN41<})uMQf!~y4EE#yg(0R z$#2T$cF3w8ETx>Hndga*ROqCaX>d4Yh8{vmHBBQig#LYteL~5(%g!Kf_FxOx1R<%3 z^)dBLQ!*9NsTfpH;Z$k}AO_IwT=h{Iq6sz<+q_!+YahJ0uik$5$FtOSpOl6l@>QRr z*?Ja%v;;E=`EC|<=wSRDD2aa>)VI-j$eahU#8)TvP?(CS+&#?t+pdrobzJ|tyDV>} z8%J9e9MdLM}c zM*I=6#g>>}8#XL|3x@jFten&672T5wk+^70$_f>$6f*`*C+hxm#jAN&{Js0uY1ly+M-4w&N#df+TPMof0!Fp_HNx{Dd9IUL zS+Z*?$^)l>y*aLh!bB(x!cM6J5HAhOjC9u|OLm1~#EiIM@??q`m}(vvisLZd6zeW{ zAY|PU^($;%8^x&e!K3TEx=$UT34q}infCbll-25b1#YHh)IsrV79?3$HzSH|EGkPePK3?*)nhL?shO zzI0KUK#YD#>Rb?X_4kIoB@pJr;IjT4li`N~-2xkfUhNEm=&jF=Hs1r5W%R;CQfZ0u zvVYzw|2W^0j3h2!#cYSq~E}l#_f2~08PrZBfJQxakvm6! 
zQsC{Xk(!0*!?N&j%tIZeW1{gBh+1v8nWh&9s|bI4+?MZtNC~a89LqK57oIxe2ob{~ zA(y78^HryUvFX@WbISORzE+Hy!+BaYsE~9zp2w)cLnu?vzg|wJ$-b}v17vpsKC|@) zS43rv?J+U-R~J5)$RQI69y^pUQ&7r#%9^LqIs;i{D|B|XMym2jhkeuf5`L#~UMF)p zIA81v`eOwR=EkNb((obdDegFb?A5-1Yo5yGIG?6lfVMMtnb*i;f(7eNQSUUmZcZ7H zel74$R$w{SL4W)n!mlBlCt1+5BW}xysl{E0<0TQg7IS9is2$5V8pBK~b_Y_pzH1$~uFYX$verBZ*^U;Ei zLOjwuj)@%6HzShr6^Rb9MZl&_&f4wRn+K={QObdC4?%eZQC7I} zV(@h@H?Lm%B&QAk6vkL?X4UI~n?tXa$em;Vh?J~u@0!r9HL$?fVdgjn{Z==WrwROg zIY}Ff$cw^N@oy)iEs`kgnnE>zI2y-Hw9KEM&7!x9IsZ>#!rT4YbM!!XhJCFMln_HR z7;zU+tw^Yk@HNfsfjN5PYajCSe-*^bi(WDfBXhTh_Zs;0CJbWGdWzpjqixqj!vwrhwBv8AXJiLsd1R+#{`8|gxI!mvG1_uTlp zZ%+9#TE(CsGG*@V{gtu82r&4@8!NLb;|`bIydhrH{~J3^2Yv64CkuK{Gm^K4BV21Yry68b>?<4aAXOHR}(Jqkb!Z4oZv5e}G^jcP{9TRr|*^3MS3BYio z8Z@IPJkjqde)v5Z&@SblUEq1tWJNKi9nuCW#a0F>1oC31P$)n*Z-{M&iuplr zptg3TMR>OhZ4DT5u>oFD^Oz&)elC7P^b(IV}?Q42(Z;l_C`HTS*t?^6I2-NH_b|A$Rt+uMTc z1EsYR;iK3Lu(TFtSpi60TN$f{bwcP2Vp9xo%C@QQ)B~4WXj9S0WC;C`D*fFpoAKcG z1ZPH}=LyL4uB7)dUf4xKv|YzvqMa$G#Wbopwihqu(%8Q9;V~%s2XG(^BQO(#ZodMK zZW^NE+|%Ec3;bA4+1AC!#{6Ef%@~zVDi>38{Vq@BTz){tNQWi-$+#~S=BQ6Ns@Nds zf=oQyNP519TH|>fPzxt*#^n9mA1;oXcjq^sF{!*G3$tu?zF(Uug#s*w3&X zajYLew9ktiLMa?Vkx&2Kjsuk*<%AX(6fd35IUpUsDuv4S zhA-4n=1#0U3mWL9aPxHaXWx1W=vW+ z9T^Ecy6xcjT8ZJ`jGj~0U`F|?2WXK#c$g`9>+GI*2JT~?b?3`%j&V{{zdGCwe*1Xs zG=xz7%{0vwHNwk&QD;Kk-UCvbO;`*dew9$mtCP@iF#0R49|kU@LBJ#}e)6JMH$UYl zt$sDEJq-YjfrMZs{$Rggg@BOHh%_PFSK`)E?wz^P?LV)VgjBAyde>kTynz4@h2*G6>CqtaPstK{t9es(f_j5>l zLh5DQroS4F8ZLfMYd4&1O-$Fz8Fc*Y^0x-`s|e9AF3_(O&~rZ2rCab$oq^oDHhit8 z=zM<3_2m>_d8BF6#ZSp13(=z&^)*5Q26V`i)FyqZ3R@*Y`9~uCA6Z$sVx9HktVAE) zxsHOzzTV1dTAcZXC_5Qrhf!jSxnmZ&b5d#il@Z7)s) z0Qq^K{FIOZMP)Xm;!04i#`6l0F-^&81$A!=IAw=0m$1X2PbWx1Km}2Tp$|SX0miw| zYAyN&jd}D09rKy+DOxVK3zlRKOJXb!W4oS|CuiT7npbRM%o#-rUrF)CBZrVx7a(nN zT5y8OxF_d~qM{>2N3cQD-;upH!jte>n!gJSOd zU%1H<_|miT=9xP^nk9ofl-@5F#h}yBa$Sp9`w5&bVlQ{3w(}Zu zJc(Ueka607+J~uOtVGRtCxkAZ=K87{VA~|X@@x%bhhZsly++xAx~Z}=PngbFtBbJM z--7eqKLbM7?!qw~V@D|!}GiZ@{ 
za5IK&SGv5H4G`d%8PMtNc>;wX-}y%(A^AvG=AeTw-kN>=6ByamSoKc?!q84jU+{e@u%mE2RX~A_GN!vk3*NqwqHnGr5`ICx6;76qP~!e z5R=qU#KZD$>724s%08J((#ns3*y?OC@!o(VHGjveiT6&|9W(kqJ{EaTdUXC;_hT$2 zF`TLm-WralqA6yluFizj^XHXb#|vi6^ghENv+7VJqvXr4lbK;dNK_`7PKR6IQ#Ifa zovxFlZ^2!!j>|Y+$j%S>Cuy@;_)ITCCK*g(_)pHpV&aJ)UHGB3G!>|Oa6v>K>Rw-b zzWCC2Yu0{%Z0aexF_8X)Q2BNX9Dqj)ln3zUV57=+S^p_qZH)0_FL# zXVy!=jd~AxPi{j_%Th}D0Yp>7cXJjQy98xc{_?+4fV_ipJbcQkMAJ$%Am1BK<;jE) zR)a(qBOESI`e4vEICDR=mT@8#g!Vb&l`$YK-4}o#`;5zgBUxa^m5YKA){52l2}2vr z#LkXL0>o$&xl}L-OmBBN*M-4soH$L~cr%zXt{-T_WNQX4^%C->u)dMNm`WR*EV$~N z?xL4Uc7?7<;K?k)6E)uCbd)}p3AO9+&{#?(QbFoFY zn?GT@7cUfWXC^?9L%9Z;m~au%M3fNTGAL|ccM3FQZ*$7BCGp}*K)c;Bc=57*ja?6Mwsl&;LP(_kD-i(!2R#A_wyUnH6WchpZ zf*BRry5Uz_a09XEFe5N^8aOJri15w)jvsYQY<46uV>*2d^)@k(8OnFar>Nl_mkW_( zMxZUp>`+?Lje6K1%99%^xGQ+W7(TRmMFkI>67e3duE^X?M%j?}|5K^PMI^-2MXo(i zO5>CzBRxIagX{+W*(gAz^FduF^RWm^y<+{ay#qYRM5zV0&WlyRs=&WmwHw?N7m+Gm zyllS|hgS(iX>#8Y>q`p>qX*qy9+1mSSrS6P|IIlpE-k<=7`yAM2QDR3uZM^ontv4Q zBsUr%Jg^ods~M0X*iQP<*c9dKY!G}>6PPbhA;n3_3aa3Z3ly~eF|;l3VzxPD6Gqoj zk|PaXH}dx zWF%|2j*<)m7+cuOfB4LUuTJI#FH5njxZ_(=t$ICcXk;BGIZen<6nO}o)_VVZSbM0aFuF4@9_QWi^L^gnv&sC zfqkI{j~BA=lFa%8befEb3dn0i28_&pAUeY~*R4KvysNL~t0 z_+kvf&cva(@6HC)e)~_E%$OM`E`cwxk_BHf1m|i>`-p4{=*2aiB#rpZoH)Hev}-Gp zefhxG>l%v&AkZ4UGhT#$o^M58e)l}M@B6ht>9@Ej!t3J%B6C+aRow`Pb^ z^1tq0?_gz_e^A?^tk?7WgGg&EI()oeL+$!nZVhsawNO)_AuNWdxS zk(}`{htJ4^E%=M;J-N^heO49U|qvBi1`(MUArgIA)xR4vIxHWok7 z~x#&fJu zM@)_Iq&J%;#HRr-z_^t8r2NeOK<9lH@vM-3T~ttg7F0AVlRQ%UXzz2hBNv z*@J<5f(1!G*h*C=+QC?U0&fQJO=>9;E^w{%E{%?8DPyr4FRb3fn%QeM3P<1(QMvEn zn&X0s5@lus*zjq-1aE!wd(!=wa`4tf9{93Wh;T8lV1P1|@aPx(-k`*Gzw`ou`T@uX z|E%Uj5O`MRTTE<^!Adfp@d_%+kY`KTGc%$cmhb*E(I?1ITnauuZ&8W_+(;K1l7ja> z%#rPwYrJC%rY`Ts9Z4v<`%unE7>9NIDNCRyxL zP@6D_X#xrd0`j>)f%mal@FMUT!@^WX3L=JcpP@B|D9#4li<`vy6JO@${{W6u)|U_R&8+`(KrE}BLD*y1%bE}qn=b67>f8~f99Yl<@umd7 zZ!1`tj;qR@Zu%bJIbMP1NB|kbyt?-2lOak4&-qI`r47MaL5byqbJU{k8Z%UucoJgZ zg|xfx4dsDzHl47+TKWA5$k$>LZqzZW@vZ5+>EirD3SSN!}nJk 
zqzh$4Z~j>RLe4y>)?QLC_an;v&%D>x{fP@L3jMkyM--70wihsw5ehfo#r(Dzt4zD{ zjB#)g7VIi?QBKMrvM9X~m5yJoY#QijmN?mO3wBKA53vIyUWV}GR51Zi^6LH8<{UWhf)s2f7 zw2Q<8XMEnXvd~PN%{GvJy6vD$Xx9ZF!^hhAe2E2V|0tu{U`rp1rf8u*`$s*m4n*DF zgcA%iu8rnn&h=}yVKt8pl-(k#w}(UIHdBOIy2x=ShOtE&Po8Qi`_ow>Tf}{WV;BS9 z>NqE&e2GW7AwXIQTaebGVP58hp+%MzvkuEbeMCva9xq;(D7Mz3)YKU=f5i4M4u7oZ zd#anKT49Bbqe?$FknxgXK^R8KuH=BJ0`15RjFbktdK4v5w__=bj*% zFi6f5Ze#LUF66-UXHEskeU8QXqxJadv2*epl7!hr3t8MM!pv7-4_NrKBO4zJEI5ij z)Ue>}31_T#mMh*4O8#51=h#^68nx6vD`d;% z{$AvM)0RE?cl1*I?NnhFfwO{myXiku$Ce#Gy?mxy+J`zKhPPC7PlO|%vqw4z%-cypwckPUk5 zwnT%SSAX)YNE;{U`zm6W$NV=6Ij5 zxzb0@JBIt-nvM*TCKOk0I8V1UM({@$&b_ev-hqMZ;?R?cD&LQyvC>A@6n?ysXRKrU zHty_*dlkfieapuJBb9trki{-@>15mKF8{h;w3P>nVsBD3Pc9_q$e3JFV>5NnE|aIkX2 zLKfL;lxDmw+_cn3I9nn_aER{^aodZwypvlKDE88#G?Rc&O*N`v3E`glN#^9?OSgTq z@ODx3>THx8|0!GXBPJ{loUXbU+|zdjd`irjSst!t+&G+YVaC~Vw!?`7D|sU)GrxQ} zMp4`mMB5(qP4U6;p4;Q8zSqk_sbN{K_CRn|nNUP;73;1Zk>ybjR)EqItN_~$umaTU zw6-bTng_rBKW$^oBO;SKUV5KuK7KdW2(JB$uf6`j-Pi|G1mV7QQs|I-$=#0+QZKG& z^6Hhe!{-EUASY%ECq^>ySU;*;v%UU5bt{fRv#@k-oU6voEc@>I8zef^#~WXQJg|_? 
zukp9x|CH^sgBig;$}0`Z+lcLK$Y&u!#3%zBjO|u76``u}`!3Fj4q%I{!=_yvo7$CK z_JK=O#@9VliC93Q>R(2DKNjAvQrr*579jso!s$~02?ea^i9V0*n{G{Irws`qHFLv* zcbI^!4jW&UjGw926~9p+itfzote08MQ>55T*d`4H{5Q_qiZ-X1`z~w1X|*BSTZiq@ z-3MzT4}HlZ(b%h&Cs%|V%%3n)(BZwJ)V9yt_xO`v?r6onE_t^{^f6!n5`WjRp(pBd zm}*p#OYHG(m z9&+ecvCKYAC?-WIg9r3Xic{Cv(N(e{irK=sqsWq6kR`o_CGk#ZUVy5++Zk-Y^ySR`I2!7>EZ9E?;&p`Fl?(fB4y07an746dJDLBAN3P`!O zyeD~WwH!~o;_&f5ZL6ced|RQdW@(xh=hN0LeZ{tpV;jo~C&WI}rFexS#w9U3cH(Z| zgE!vh6n4=d;tyhq(ig^k`N;bA&pju7@60w?2&Y3~IU(ue4F+UYOSg1~9TL_XriOuX z+}FNhI(taxu8*I|)PkeQAGKl$*@F#Hz<$Q@nh6f>46YLNTdA;!e4PSoc>%j^{&b8` zZ_P0=;`^$*bb>#ohd-viTDX^eq8@DOGU2ol3*{H&132)7(=+LGKc|zO>9nhO0&m5S z9fE;q%VZQZ(pC9kTHFoSBXiL{=FW41zZaH$cGG$FM80wWBn5m~fRszidkJM*-axvx zY4-n=OL{w-g>rr;q18yCUf@V31o=SH;D=;C%OS$4iI2ENS-g6K{O1X2FneHm0=A}j zxSrp$CGZiWZ*U--C|#AGDM%A$VYxRdL;2-q=A3@>ljI)(axpjlbzC3jUNGa06_t7z zJKrMbWZ3{6r*NgQ(d^{G#`}CeVQl!HB7LV)l$z4baA#x4%}gE*n{~WE5qOiGI#fiF z^Kpvl5&ho6(%&%OmyC*1lyBfd?4N_|r}_H(ejqNXdQmf5CjPS1ZgFw~~L9-9YQQ-coGq(5In2o_wWujlPQ^=Emd1=%H!7{;B!TWLO7QgR5(wYK( zOlsm$myvE$d9-z&9@mbPl$-pwAbG~>;NIlnVSR@661YuoVQi-q8#?7K3(NdgC7+JO z==@Y$q0z7d@a7258FW@VjKLmbn1!?i!gDyB3Xw(JOYE#>LmSxd?A{dot$0v%;=Ips z>S`MjC|9oPD)52V4uv5)wq2`bp>y|XE`159oWfmdI@Wu~_tITXKW?xx$dp_c- zEDQc40?`wZX)bc&Hv|G2r^QEx#&781@I&BGx=i)<7cLr-n`+qzCJYZI>k5^2&NS;> zQUq)6$IUPhB^oc(#aAbg-)jAnPWMy&#?Y%|!^ZQUk?_!^pX(D2JE@V^fp@dyfuZc{ zK)I1lT33oi7>qI)t>#ynt9`*!pA#2BlTXh*{9`Jh@ll^y!e5U*_koZc#X8KDg+hkp z_3`mazbLg1_oO1Zs@tFHI@RWy4(&9zjmF>y0Wstdbk3A}oYE}Q%M?aV%L+!7;pPsL zW)4NY9ae=f;-}~mT?3Wt?xWB<)r;2z8b_={vpn3c?~dLk{MhhL@^_JM6icVN0$D>7 z?pKDQs<>7QZ`G4d!dRuNEE_T)nj$u*2SH9cHA5d#Ld|vfwDWHM6M{~iuURIWp2Anf$>UNr}n1XtLzprda1RueQ1^;a=+$}4&zupuAbI#6Bj9JtBiLk zT1UjPvD{at;|Ayw%DUW*h4~QcfpdgQ9MMu+Xen_RY>w{97mZQ&%XY$Mgw0-6QYNIp z>?KJ5V(H=o6YKqGl1GB<2v0mY0F`^LAtWTUrV*dmE;-@%W4(?_)A;X;STC|nJ^!k_ z?!^EiQ|D&OLL+Tu?CYbdNIG3dfSX2*Xyu-*W4s z!4M~rQBnlHhe*!dM-ta`gy5OcS2k1Q(3se^6|ptr*1Cu(d4+~T?jba@;bD#UH?Mx> zrNXK=R%R0Sy&dVA%0pa9{X;p#n4}pW33L8)MTRoQ1}ODnhcnqc3^hI-9vRWB4L$c* 
z-u#Kca&WdNJo!e2@p=rh{ypiJmdEw=v!CW2sp(2(c%Rk2O$(x{n4t{0c@y{LK;9QW z@v;H+{qstPyCMuXl$mxvOwP7iEZQ22!XLBkH8$LDm8E`x$cNEmkHLvl_p=1;bOh z$*U3DrAp3PTsB8L{VVBi1Azf{O}{2Gwt@!ZO3x4f#%ceyFwGp(L8_s2M!B7VAE)J9 zrQaB&W`PTv=HSapw(({o1#&n_Y|7rhF8rgXjFm zc}%+Y81ce`jR%X}F3A4!_WE1m>0!3*z?fzAqafo;U!OIHVaxB|XSMY-b^p>J*~?UC z+5-x@^B|~Sd>DNEA^m!wcG9z0{Ri{NH1Gv2Z4xn#ig_wQVcfkUOszsxe;)tx^COdx zZo69C-BEFWzSC*dJC|rqF2%AGTJ0{e?007A&T>=@-oLERpME*^vysrHkHp=#+U*E1 z#IMd4m*^;zT8dZwu49|7-kz*ZrzPNxXqt>xM>H|cYZ3>?P{u^Y zB*gL6K5L%5)b|yh31Ey1`6ssxY*}^0p^&$P-SSL0Kydc}yDL&M>D%aYJyPTgk;cW{ z($+#NMUD0TG2i+4r-bt4)C6k@JSG-Hlx_5M-+80OV!RJ(DZ5m~&u}xKZamQ1+gYL4 zESkK0y1m(kZJJD%@b=juV-8k|eYsOb^2yY9_J8)EHVOPS$@_~>drzr3Lq4OHh9htd z-R>xs9}MaD2Uxx7EMwOwGGy2;a~A!~yOSRkRUYHSR1nt*Uc!IGbC~2$r4@m#eFQzu z%ESuB)^3KT%00G9vVulf6Q(%jUxkJ*RXgGn7QAvMBsje%$KrmzI4ieY>QFYQ(MJtA z={7iN$tNyB7Cr@c3%gy}_>IxLY|hP3NNT>$`wzm{Y_aw&W>Go!RicK!bs?xGwZRy> zixM{n5z*SZMIL`TU_jl*fE$ym5NvEnKVcu$)91WyBRSUCUO|czNl+2q$#xar^f13* z{|O`J>d)tHD?8lV?_-T(zu>o*dTn}(&zRnB0&2x@t%jVYZEzA(|8a>O&`?w_5aay z)lpGyUwdfjQaTg{Q0eYakdU&jbhmVONlQ7PbSNO8Al*4MNJ&T|LxXfTeCOr2zP0Z1 z{^R(bv(JuaKYQ2D-C=5rHgIytC^7a&9yLajn3CN z!ekaMYU$o0>MOH97rIF(OKkp5>CIwWyx`(bJ zG3~X7^UU3uQv79~I;*zgr!S_2j_TcpO1#IdcC3?FshQ>Dc%ANI@tY&GL=u+zI8>@> z-JjzL7B|ZSm$&JA{^*i^E9f{W>)UJ}YPoMi1R>D5{*X*of5s(Pr#Q2p7#V&Njq!dcgy@L^Yh_$-57Hr*3G%|0N`*T}NoknUX+y5u*+8I)>DndCtGY zpG4CY7SuhC6^~DGqqJ+N?!TgnnCavZIbAq#5fYN)I>`CPZ+wmJ#HMPrY1Z3KB}z6D ze|KFBOmFP3Har?Bx%A7OElZ1*zEoFhxLUa{S$8d-B#_*ntTAGnD?<@uI zLEuxwiJqG(ge;$oywp&BWlnJ&Rqoa)uf*4<5k@HplTe)M6_up$bvKi< ztDuAX7u~Jqv%8TZ<^~@~9nvp|_;NxlLf%~avUMcJGWRSpiJo0H+qa&q|07CUOkg5K zLpcLCE$s$46h(EAO(^zh0Y~8m1V{#WV&l96MVDZU5V;EvE37uETEmwmBF0(>dMg`{ z%i3(&ffsal7^-e;Is0*rN1T>+CyAnMwEzG~GAdu;x&Bb#t!G!Gdn$f${piB*yYlnJ z+Dq%bi!OvnDp{`#o}j#AR#7_J_}1#mf)e7VS6i>eIg>&LgE{U zIa_-3F9eXk61L_q|p1Ow78| zP>nygccxdGiM`GG++sLcv(>?9xU%IjH(KMP&wdk^=e&O>yxx;nZsox1*EYpYoRS^# z6l*qeire(ey1nQ<)Imsi0I6YG1f+)}Ko510lB$wrS$G4ydzL9$h;zTG}-`E2*7JjIUCEjIZE~oMSI%D%J839_BQ^$F>)n 
z{r2zz17`CU?xbzr-3v(=bFs#bNSn+?g4CPkC&(s9MQI_s=7Q{6$&wZ&jg|~V%->5T z8dsXANA%lTBe}4(G)lLoW)O?a{T)qmwmd%wt(^v*g5N#Qa_$k}GYG%g3@1?E;#Iu& zrJyzf+%d1_ZrvLq{c$${dwP4iEbWQ0Nx7|D0H?{NCM%MtFh-bndPR(0b>U6T@1coN z*C!`~it6Y{0l_PIwG9R2D)5hQo{%GwAR?nZ8u-W95Vsmn%-D@sGS-0!r)D2op>nDeYJWqj8g1p6z{h zY9VX2B{30Exypjt+M8a__c+@(yjl5ewgtg*loqOzGl0@?k%{O z@8@D|x1_?zm7;KA28A5lXa11b*4ZJk6#=pNm4i%$cUQx3z1EI9XlH^}C*Si*P=26! z&6EeU)Dm_w}ic|$}#U7uknBnp80_B#AfYXl}u`m{NU|zrzM`BT`^9E zv*iMt)3R+v>2Nc1-h<=%_g#duV^v$-MS?q*%jzIlrTO3_!idapCU{yD5?2K@Ql!wa z{;PTS*3j}^3PId%?Azx~yUV5?6cKl(zRSxke1bI59pYvIKr2RPfc@QYpc8%er6976 zc8uS6+vhgSF!$OJv>6UVp4^COd-P37OMHgfJG-5Muqj&|D^Z_t=2ZQa_Oa?UJKK(Z z9O-l+T`pE#Pr;1C!|p9_u42*BBhGjF({xS70gPKnd@7;TuJcLJ%gxz!w|(Biw4)lD zVyYZdCX^GksTnKA@4sJ($k0&tTZ{_rMIi6v192pdRsc^Z0KdqE5=V}MNZ7dLu~d92 zF0OSx>Hn{Crt>yH?B}XP&Wu&r76xRdjO#C->e#=izM@yN@hv#1LQ1-Q;U$PSDqmet zQr-BE3hmCRPuj#eMY?J<#@Q~ms;ni6_3L+Yc#?0&L>a*Q8E8YleCHLH2`UdjEjuz&tBsO@E6}f{BFz%O-aqHfJ4&DwVQdNP)8Xl3= z7|pOax)?Z#)>zr<_pa}Ajq#9giXzSKP8Xn_E(72}?B50mG_%q~d$7@kBy&*g z1g-ti4bF^)mVVY%#xuCxt+4F@X2$qRZ+DG;H*MpX581ktLJ-v1=&gBEJjco+96Q?J zGt<)p0;Hvs%N>6WHa8SUue_QbgUNIf12O+j4ek9hmplu~o(wj|Y`5t#OX;j9f99(u zd4{?WoA^G=Yj!LjZ$!4qd$UuNm)_()DdAbbrI6Ro8hygo#1lzdoGd4w5+;LW67A^- z+fJ5!?M#_kH3}cL{3^Gvhe`#W>D(XaE=0)2kwDR$k>>CD9iEYOiKjUmZB-m%|K6?V z7i_s!8l|jC%DGPTO`1m0(AVAc1bz&)8rJuI-#937ukOCo+!Oi%yb`0V*nts3V@YDS zDXzk(abBHO1-7JaE6uO;Hh>X0y?i_Bw*?rz%6+PCxPc{>sx0#Vtq12bIpQU|SwM_8T zooQ0*(x(CK<27*5nH6Ak@Jo|%P;JZK5$@DCV^K2eIwYFZO`VhL(cSaBbBm8D9e%ZZ zqSG?_{i(kh3`JO~3^(}!Q8@W(M2K*IA7Uk-%*&5t zd$L2csJ})(bU1Ct_K2BTz`H$4*CL6va=NPQB0_1HAcKyf!gT&t!YgqYwmvl*$(2kJquh0b1Jq7exy-h!fH*^u5_2o}~uUC1x zJr^#HIw7Y*&9%QBW7m9ifqe|;Gd0Wg?p9Q9-4e3e*;V-2wm3;iyno}j_n26+EFEZK z4|$sCoAED%ujr2LF{}6fH1#sYeZ1X!71*~Gip($-w9sCLh(8++CA4Ut!Gblq{X_%S z5gzL6eMe6;X^*RWs1O!A@NdCk;J&Q}&D8n0&@|4f51%QOyu%A?9UTWxRfRC_AA-MR z6?F1Xd&MPcxhC>ojQ?)=i=jFs9+(zuhmr+iHz?xZ9?}P2FS-3XR0?BjVTuT??xdc^ zYa+^g>BLr)126ntg$y+^)M=3(-(>nYG2-Kic9R 
zGMh#@rg})Q+EpuQr{GLuC-x$@gp#F1V2*P7MotivRUKRe(h^RQ$6;MM+!fSORS zppYy2?ym$I?P(v3vyV%MF&G+vx%vc^?@Yf~J;)0uz=hNh6hie;bAf{(y*lR->50n)x=YxH`v%vC9UN)mS>s2!y#!YcfRdU&uAvfUjVv`O;|ahB zOPy>g$dJSV_uooS_yP!N|QwaWF$2m+C{05tkGu#jy!T0Y(X) zwGISVVt+5W4_#mM*CL9M*k%;zjKW;(*F+c@R!fbciDW8&`4FpGw zZcPwjTErWsEJbB@muQt!OSz+EQ*ap%#>bYQtx7Xg^a&=T>8GtB3lU9UIEO1+8M#Ziq-(K>IRnQR+O1 z>vxRYpcAFAF?7hswaH(HJqJ+odI>bK+*SygWT8aj3NS@XC^VECSns-Pb=8Z5DRVxB zSX_M)uh${A@hPcYf;am+bbm`2)V;8kl9O{*R^p_5OqZ?|417cC=!4255M~n%Qd5KC z%or2A4T~k8&mPNn$@L9HsDU0<$~&C4&dHrU+aG_s&ZCE$O(vP|7W7J-o$TIyt8Qs& zZJ`m6Fh(?HE7IM<%vm_+L3PJ1Q?u#=a}er&3B+Ii2dRNbhlp#v94K7^3fu-}hJ5^< z(d{`w-KjG>c!Gft(H!Pp!K>F8=It^%K2qaPj58*we3hXmustdH@Ju_7b4-2zivY3~=NQ%nxIY-h^PiR=od|z1ZmT zvqrp>4d&z`n;{N*MW%?Fw=aD^4zYvBqO!o&oPzgO#FN<}bN8l9jn1>ZE+kl*QH6S| z@6-O*PV+@IIUfgoT}Z3wH>q&B%W*L|R7OxgZ7W^euqG;&AA<3IKu@ zr&iIKt^UQXYH&@Hyr4mI4QziXz1sndmGL$-zJ=V26R3?iqV2Mw-hptuDVAZhw;;!8 z+v9lcJ?e(#Ub$x_%O>hCtS^a(^}R2$T366JsLtq-ew0dNq9v5@$GJAT?y0c~y@Lio zmm{|Uk0o~m4dlEKr>c$eLMf<$ZhjM^gU*N2>Z&_2Hw$+HNRb9FLTpdd)R-Le3&ch6 z*LH^}nhv*CJ+{9EonE~yUbvGlLT2(8GXeDUFzwqxhn5DM;JX{}PY%Ku(JHufV3x{d zp6|Sb?PK=34!HzZIloh|TcnRXf)jnq!>V5P-Arzz6B~Y?tSC(xu|9}|;DC1;;5x}7 zK~N$UjfALXd|j+j5xQq|Rno%9?D9GhL~;^ih>ljV(Kn)j80k0>c>?9^s-_@H15tF# zF-3xM-0e{}3o}opROpdG$;f?g_um^y*T zqj;Z+9D?+@&+=WEx?MX+8EcjtdzBH7@jlWB>gikReEN1n#J3w#2Ts8s^O}5DB>fq7 zXmr0gGkSwmXRIJ5Wp>Ju#{4_2P0+fyp7|K%3nT&6;7B7VA-U=X0PlAXG*h$z95@6J zu%}~IY>G|q`on=zy_(o2;Tg&`6fIttQ=7$A9^Kr^I0 zsHI$y^VsN9Z!Fe+e;gy{@vqb=(^V`a{BTwcWem6zIj%G@MNmfj(yDVwjgp2{L-0l5 zn`Q$~kAg{xA7e($#;H?Z%p=eqg(x3s0MCLygcb&W{?u9N8Z*j4Eb_2_09_hna=0~V zYGiE{N5zU9<$*gevlL+_reS|*`*JVA`4Q||+@4>uYpsob25Ql8n}ng=iy)IHp3MHP5CxX?n*h8Q^bk<9zEm}2ry*cbhcRZF5@Pkr9f$+NJO<=1ua0#p zCZ}O>^V3Cl($f6;afRPWS<}ED3p!JXMQzffbVdhe{kZkk8c}HU8#TriKQE@mw2PzeL zB47`YjUi;wd`O^7%mFyKT#bk)RoueJ&OIrgDT9ZPqY`RsHtZe5Lc3qQCZ5p&MJ~|w z2)>jIsWL$cIGL^h>%bc)kzXGdgl+XxP$pNV_Y|^v27JibZNo-*(6BtX64(Z=8xwN6DhPAB=$bRBxt2nZc8&Q&5n597#fG?rl;-3E?ZEA;%Nd)!4 
zFarGUnRQ$Te!S19+1g3cNYt2U&4!(eSg2ydJ2WX!huHskyDKryQecT_P#FZ)8;~iU z&cd+SOSrpM2wD)0gVXmY-a>5);qBQ!kz|lE83#coVXP8TU32#o_v3=Yr=&$;TcK#+ zbY$q)c)Ol=f_NDvlK`!+BQwB1pM%>9QrmD$C}}1O0*ZkjAdL}E7emJX?==)?htht` z>6HFj<7ju?{~bs%^MOJ|BYzUi{|usA`y7Y_xRK=I^oQ z9yK=72rQvp4AjO{tA-rB>UK|TU{4{KVdpG|=UDiu!U`9j-fv*j4r0-|53^kZ06U(5uGx+YN2!Cjb#p zCmNHJ5%fDH82DW*X__jT@O0>A4{?cA8gX;)I-Cz>gE3y}%~Rhg0+#vZlKRvExSDcE z4VPM>Wyalyv?K5agH<$r*~TZgKi5&}m_ieP!j`2$ya8sg$~a9Qqy~sw*CWvP{^vsB z9|3+KdOBi&DIKeh2GEuy+OA10Pd|(nC4yP@42-a%2+#ZIC0QF{StDD%8bo zZT`}%uYR|B9MfGt*`rWeCRp`k;g=YS2{KEvRLCqt!-4r%V%95#5-)j*%?fd=9Xbl= z%&t07R0VEK%qHoD00kDMr9G9}jRqPL0Ko`TLrC;|5GGYX^T#Zr3SS&I#nzM_Qm`17 zq0=LaktRPGy5K3in-?FRrbd68X(`-lageOC4fy6qU}Oq`)&c$xPJ1lhXinHrZ!NAv ztMqiJCh7JbUcbwfcR<#{B6#bzA3*MnItv`MFj_O#{)KazTWmjme4Qxb`y88-k#421 zaaF$~a8nb_7&pL8EjUB=Ouz*>-_R0p&<|RE25{GZykHz4+8xGZ)XP{~iF?qwA6#|a z?hxC1%RNz?Ft8elhKkQ9@qe;?g8pKYEC4UML59-9CfTk9$I4+H8dBhPEy(>U%7I``*P>^~B{_S9Tgvy?ltf@5(F9Fd=d<@utfB-rppeM98 zAm_jZU{B!#;Y*; z0ayO=g$(m+5jw6I7za$K__3aIwexpiMHIIpW&-`0fcQl(1Go>}P>8eTkH@zWUN}+h zofqAXWmeMD{eE~pMqun=;K>K@4PF&!Lql;i5qd%Z6-7*n{slRi!tk2u4CpgiEyJpD z27gx%BlS7hMtZ^oP#QCOIUUbx-|Grso z)70pK&_oPLmM<7|DkfkwV>_j(GC8@)U)-HxOAx1*HBO3znTV>5O+r7rff*VSz(BmCuz8`TSr@*H~~{D6%17U`hh_664?6Vck{ zMVPLPg}6KaID8)AZGS>t!5_0&lRYlzB*cy3{*%kV(~w7vm5FKK)@8K-Iml!9!-@ck zReUpn61&##8^8sT=7|M=oLA_O$i(NrUZ^0bA)RpB>Y1D=&#ebXg3on;&c7KjVeoQU zy$d8D1nm7l?-eCZsk(TF9c_ZBZ`}{*?!@`#`V9EGL`aBGU%V;$9PHO`89SvH@)z(K zHUS0ahuvA9pQ@ocoy72v&d5gaF@{?#A@{%Ebb{Fc*di)TdP;hX(+?BONcbRBA(6{cxb^y=CAaSGDfx>vFyZmUsMc}3fw4^k%4CNg4mO z=-u89QW)hfJqiKzslv4`ToL^R1SitKoQ@e^mBH-K(<0Cq#)IKT~ zjJzBe_j|8{X72)dIb9J5)3);hGm=2v1C2Tl27dY6V>c@Xqpysw){GyAEtHC&^Dub$ z{kNX-zew@F3}xX{bz%P9Q^xy$<{vVZKKK}V(GAKRbt6hk63Zjq5C4%Z$Ja}vGwSIz z@o}+(SD|H-A(=Y@`F%paymN{J1}EBSvQ3z_7vJT7pyWc2S{Bi&5T|FEszP%IeZaMU zZiB5m?yG!CQfe9?i1+|X&`PsZu{qj^$!FjScJxGyUS-jci9!Kp4x`ohTvR*$zu!Ra zK7t$=qD}x#038^zXcw9exmdg4^=Sjycb*yld+gAWfO(fsEqlzUOfZxXwa)1Yjte@h 
zq*YFKJbRY2dsL5i*g)fsIHP<21+Pu)h*Ay?v0tZSl#5t;4Bv!4_O>o)9shC*B>HZV zixK$XJSNVy-WE7!4-+AS;DWCcU~D?>8Ept%S2XCD&KpH%hejZVZ*hy)EqHzi1aAg< zt$EcfNFy|g7Fm+$ves@XB7VA*~<)UH{)OnlZN>fo)~+h_Rx? z{BT0$PYHfVq)@AJvU>gW#P$Yg?J@=05?m3oe_k*MBan`|FAbu7R=3)0*yq~Gbt>6V zr2xm#7u~q`6b9^=b{BpO3o=4eMF4GsCdN46?*GKs+(2>g)Q?KONlQ@_|-*QA8d)T}?p=v+~w} zr65#~_^Jpwa~!ZdE^G#LJP-l+MuAfgo#g!kt{!<1FVyKoC!7GLDwSZTth>cic$#vi z4pxc<;obLvMfsv@0&0mV=FXCI%+@qjJiiaVI`qyT_J~;B)KnP&D zg&T0NxVtDeneV_uhe~9d&=oDZzuTC@|EMAimiKs8O@SJ(3Ipoi|DUSM20~~GI}kW& zi4q1NapU*tUNH8JdwC}9=@KbPzaHFEz#eo#Ekn@*GTrVMUqF*D0&N9&7YcAc#|{|B zTyGI_94|z!F|519C9N{aJYeh2?O%T=I|;HjAa87lJBiD`4LSjkZ$V>_1+x|tCYq=C zKS~HQN0f}UflV!c`M^A&gDcVW!$ma^QU|inw`&WiE_{e_D!8i;4Lwa#b2>YY&!JqR zs7(3Ou;qV$QQ|B$lXmC=E-V#7LUESRcgW0u9ITMNARv-aZuSY64!eoaGv|8j@+LD! zy>Wz7MC0gd{#Umm1!WF!8ps*RA;2()ByuOf2GC1LenRrzF^W9g2Or;q%%2y1>OaBM zw|Js~%F`#EL5vs%ntDyIHrj!&m4VFn;cmh`YUJZD1c!mvS-mR#S(W7>rs{a82ki?*499{1<9Dr}A_i{n`-#kegeM=62Lz z2_3{xJC+I2tgl$Ok0;|>DZ%wuX8gq;bj?k`<4(EM;3>*!&8m=Imqp^!wL{naIsYq$ z;;p}OiNzzysA1`*G?WCHc+WZ<<5zTMDIFU*C%H0@G@4XiE$|1c>X3*J?xqO8l6P1M zUx|k2^PVl9w(b}6HaZAZ*&1PlI_}@tWp~{i3AN09OE8<~)2nb1*hAIfc&tRTS=!mR z^AH?f2sKgtbaoh@U2};-i&qi{&QTT#h5>l=VSnrEe3p43O=u@~ef=Rk4F`;fv6fH} zny6ZyC1cHan>~6hKZva2H^M-)_%??e;u?x>?ff@;QJ?p{b#sfajyc zKF!cxmp~&!EedS5dKnuV^tUkaLj(0(D`S)RM8u%rfjz3ojPcH21$ww6+s7MtRRQpe zFZsRX#O7`~?-kZ~qbV0Bt9!jo89|sDMx%}We608SALN5PzJlf$MMlU)Vhf@310RH8 z4Rp$9DRl7Trcw*z?z4pcQw%AwdhvYw?;$S~6bG}Ek^<4b_VT?huRU52uXP%zY}pun zMrP2*g^m?#Y$BSTGq-QKQe-Bo^02RU%h7qLtd>JpsB2x5Z#OQ`0N~$)o422f9K}H- zfP?woS8_{q!0>Mfd~oi44hya&=VoGg^x4jzQfb>1=EmR|7A7pw9+7K8CsLZoduBo{ z?8D@)e~pG$7CP_dc}EjXHy59lwvC5bdmMb~rVTbjtLuBvf2leEGOCF64f9(QmW;$YOR z&{Zr7JSsT+@hvHWA6W4>v}WvWCx{nD)S8@Lhxy6rn?G493md!1n3Fgf8TsqCS%Rx8 zxr6`R(#afc>g$X{OsIbN&$(NuSjNZ!(4H+W0@D9mgpiz)yo_A zv|PTZanXtEL_f+FtNDar#I*Ok`upWvS6bAgAZuxdY2d-8B6a1K1P-n8!@I4(X?2p$ zV99hKFrA86xX9&InD{wq1Ax$^FE0F_ePGV3-)SMg@Nk&5KV4pw#zdOcwMdz9MHm@Q zQ@cel8Dg_|IMFPO!Px^AgzD 
zG$=o>dAEJ^HBulJUgVK-XX(Gp3pXO=)~>rm{O>!v^_8m#&zPOHUI?(Cy0eSOf{_uZ z;b?Pj0E>bx2hCp_cy4j9(6tCfxf~oYKm#HWKqk1T8#D&Mj;64@=3ZvBKO>v860MPa z7JghL$#rpDo`y<_&g>xkz+Mm*e9W}%gwpsnkQ_7$dHEhk<$0R)Ebh2m~FQo zLf0WH$DPg$gFNqpw0kqpADQ5FI`$lzA$|sWyT(ljagB6@6#jjv=EC0 z%6i8WX@AmW@l8YmBi$(v%yk8@rdb92mrUo041aCzNuXcHUd{rBAB<;nIHr z@Rf}tq=6g|h`xj3WZwtp?uU1Y3AkXp$xY)XzrmC4LAp>QO#dU7i+Uwxwbi0tX(S7O z5!LUQ4`AFeZlu=Ze7Q!Cfi)@#JP7T3r@A-VOlQe_DIO7ri|m~?FdQG+^8IBxEo8pN zdo{>>GcaF=(b7frPh|biGh>}4Gm2fM9ZFtR$^_eI+w6M}YL0eLcMr~g!I7FVyfKS; zoH4K{8fm)fF5fbd_w;AiTvX}mZ}nI1fBQ`3j9+2|%;?drQ^WH5kCjO_12{FR-MhPv z;23@=TX40R)xF1fu1Zcz8#^6jt3xnI-7##Hn9czwfK699)=T(Y<_?xS?03LCbQvhr zYlBbpul->A<#>wfJ;+Y;wyIU;lZ{A+ui0NKV=`~9n%56qPhlLergO7QwDM}o{ znyn$Iupj^x&=mp9>cKN-C$l!(M8H_&b>a#_iuV zE}i!{QoL}hY)MPr*L&sUnL4z!>Sziq8xP0x!%8?Jfj(uPc(#VbXt>)ieWmoHZ75L) zTpsm=?`V06YYa4Uxga`W?d0OicitXj=e_1h^I~;Qf?rxRn%O;}@XKUmClOip?ner! zqWekDYU}flE~Cc+Cw?Fb#U>FY5C*BsfIYGxBQf*NdtrMxK1rb)hZ>=O3oG3#7l_lY zRR4yQ*+dpr(Aja)=~TcSqN zP9+gnQY_{Dfuf489c98jT6*4vhm^Ypz$h=BfPCQ)Bo%p5+zI5n4`3|$DgVc9mjpSB z6xOcPcT8BgBDI_FhG2kT>|c!g7|RS~rXsr7K!e$9dDX76pNd3ncPNwy+DwPLzjLI! 
zQNwUzb_n(Sul499@#&>~52Zz3`L7*5i~Id5^|H^%r}g3)K^wA+(>1O=9_T@Okk^!r zc5>$fzO@U_*v*2kmEQ@$>$rt~m78Eyc7K)n0|U>_;%g3xWsX1=(FUPYz6p>_As%#q zH1QuNyBCQjA%@7|QzoK645;)VdwFxGVCnBwg)sAc#Zt$vbF|bBUi;RkD{7iB%t(et z^Pu2_dbszi-I^fAq_HZ@xrfKD*WKS+4;24tzdJjdyGbo#I8bC-B@At;;#^VHq)JwB z`nq~ItR@wANm}q>9$)FhC6PtZ&<%;z%nG4ON6QmZ75-64GcC-^mSrK;qyC0Q$1d2n z1QCEDtblMmA%mk<0VtyT<2)z(W6TU35VCq-kZy;c9>q(~e@^Efnw$;nbultT@db(Z z-|R;)?oF(O;VfN_;9DH*r8di`c`=Z13{2!rE@!Wr74dW8h!M{l53cCZdRSiVfFksP zM4S$nAGb!|BO<+=Iz0(+1D4UZX2~zBir?bD(=o;3Pe=R#t>UrLUP+0WUV=`^y<55U z1*wz_v+Yf`lvOz@WPd$S9Uy0Tek)x~0$*(wCRr%Ucm!Hsb4Ij6X-0TTZ6ep zxs&xI!_J<4C)bCn`aY|Ze(BAti)WpZbBhb>i8zTTY1*(-TY|Qnsxy%}4-`IKkvPej zaJ-0-rvqanaa$(&GR8DPHUcqvQZV~-^<-f!g1P4xArZU?3=-|hR=(_hggb!UR11Ki zJLD?(ena{Z46^h(%F}xv7hEVjt1T(zTctEF9k&}UuNq2@s5AVocxiMtdY<>rN3zV_ zXXm6v>0#mb;-UCvM=R4%*3D-#Sj5(Z_>^M%<)M3x7uSs0`*_~zBljnska5K|G`aJG zg&wqQV6V-Avht81o<_eU)nPLRE;0zh#6dCnbb~xzg1m&`Eh1qeKhRIZAZE#ZHwL7N zP-04!0C+Q46QR%SI)~G+VZ$Dy4o?R6oTqes?Dm(1Ndw1*QdXzgO~-??_I?Ay%iV}Z z?8;D_d~6PBu95x~J??`>Y2S1%1C8dg;7E96p8hY59bMHjw&jXmpVnt_mE2bd1IfKb zzx3B>Mnd}U3)c7j7}+ABoajQrBAVdSYkWq~W0ryqW+>TuqKl{fQ>ODZCt*m@Op+tr4HU*PPOM@bkAQS|PMA z8VLAh(EyJMgYF5f-$@^LcEZNr0;%>7?9x8x--I3n24wmhT+r)#Tv)k+AWRF&JDr;} z-cQ_xZ__MF5o<7in#YO|=SZD5!J`uQ6!2_wR(yVpQUd=uy>{jinegb^@V2ylw}>2Q zJFUYppSFv@#R=2_z;I>(0zsA#NkA3@^p;?krvzMqvXJK7zgN>}iB+wCslU%3)@VJc zjKo9X^*=n@X1qxzoHkg~NXQ6@`R~LplVyZ!L?TXZd5~a$xpct(nE{?&)T&623-ODR z17WvcmYr(U{c#1W@#&c%R>u?3J!L$7?t|y{w(qj*%Tp6GCi;djxs^(}y>tdj2>Uv#~rn ziaJnyml}I;H0Kk;d=z%>`q$yBiRSU0Chs|a{>SbrOKAu#ILV{9O^JOXgX=$eyEyMN z+w1kVyEFM;@!?Jc=3ja%9#Y$mIFA1v?$Vjot@XfkEp7;&HZ$&!;WgS^da05j8OUx| zOVuX8OTb(sSq&!W`r?%>Y-rmAL`GuP6^{Qnn9lJENj8R<&|s&A+n_c;+Uqw6u~9?N z-2rd{Kt`p5+k3#RCWvx3_>FJXMair(PD z_e7@}(Cd71X}^BlrPdXgw)CTswA^+;%ig4Bx^{KoX%XmOmVV?NP();L)2yPTY(8A* zIHT<)sno-ul8Dbva_62B>}mRoD*APXQjW;6Tp?{5?RCoyc;5ZUZE)W%8@4b(IcH>5 z0#Qb>S2A#m{k1r380%=LY_Q*dJ&JM*8Gwt7BLNY74s&}|h0*M}?nT#$PXsOH?62(N z!exLM7?at_2tgv)`Jq6nubuQY%!~L%<(6Vt7h}eS;Jv}?$M05sZ~idOZ?kW=m2pOm 
za8%-Eo;b`ti5X>F@>t|uQP)*8YVm+IU4<2wfk2hNjCfcsQ4{M&(w6BxRZx!i@9db; z-h8v0G@#`*vE(3rtR3B9E&u$%@?F!{E{n!1`g}p3D(-UhwhFY(C1M+bwj+>z>QMab z-)gAaRz2PkX$P6tnhh}pu^w0ar^CDQ(ToEf9hAIY3yQ=1Be01kGhu80vy*OHmah%4 zPte5*Zy%6GSVQWn3vd<-#tnYM;5r8%k6rx|my}bR1z|KzzW0AGOsdW6hzN%8@Efhx5~DCAJ`O6e(FGY1TR*LGw5K?d%mN;e(2-<_Bw@ zVQ`N$M3l-wapV|RMzhV+_cx^mzRJ&y<%&s_RZO+Zu;V$2) zzTR+5x0Z28a1p0BsWn}o#sS*7!3eObt0-4ZD=8B)W?{T9d~>Jyx+R?>U2<{h+miz1s23kA69e2DOilW>9) z&kviT@09=@W4?HofB9#TZo*Tw^UhmsDj@CT#p{KIJb4)E!q80YigQzvf-$x z;)Efs1&&!7R5^yc2}vW^y@tU_gTm4|)$YR<_3!YO;KYWd+|@D{?Gn+4Gee|JQY4Rw zbopz`BN?Mo`!D{h>wxKM2XWT-1oVw zqNPzFSqzL#?sK7v}%7KYBMQ5$WvBXYZDd2 zlPpTza1-h7uvba>jB6+I;89MEBv)>KX8Z607EBVmEcC|0WZJM!*QLU3w~J9y#amfn zPTc_;$DQ%(_o8HnuwP4)fV60A_uIMUJR}D6@7fdo9E|*sUlE6!`euU7821XjtiIw*)E#NlMVcBL~yVU(4%*>~H1^v|o|u5loxf zTuJ0+)DXmerhDWxN+;2>!EN_VvSPHo%8LSp6d){=DS>&6e6X%LDet8g63blrho1IhiID{PJh`{?~&I$$g0KQzqT&orW%m;lHvke1?a>*&xG<0;j4pXHX#*LREB ze?@oY*L3qJlx8psVQu9K`v_X8pjGr$1ekajP<*RRgr(KykX(d{OTn<;EoJ;mJJ|{v zP8O7(yR-7g>UZoW^z77ep@?^f$x}VoBl=-f<~w=@$UEDZCR!6!{fAibkFI{{>0bOb zmXstFzSm_$TtsA!U8_0d)f&--+SK@HoSZA}ao##&tU zIJW&px(p2Cl$P{Q9KB4E{Lw7q957T$T(6~_X@_r$r&eB+E4pIWN`{{uzNl(cLCZRw~{0-%KEv#UC#c~zuDadM(M4;)HZ#d znHLR+ww;X1;E9`5&OMkquz$7+CyVSP*@V|xfO5fOh!XhUtZO-dK;?mtQHP8n&!0m1 zQns*P=w2J!(8Sxp1j%Ax7aSQ*xLWI?h;%)s>1mqas%}@u{svl6ZT}=TgMzH``kQhx z7|8lWy zBD+AtpRxcHp(_9BiD8~nz+s`waQQGSdwiIxw@k#ZCP5>^H{Exd$nkQwz8XbHX=GP( z@eAeqG~{DOYvP%&-}U(hTR?osXUp|}Yq?*mB7mS;{9}Lfef3Q2Tk0p=L7l8$NCld~ z{PlpVJphQ}x_g}mG`?g*9taK;s}eKuk0sRx2`?bMDAXH2r7QfM5B!%|ezp4+K>lO_ zr^nR>Zrq83O{{$Wa9ud=jie+?5CkeCI%$M#{J+j=wc6t`PG9nXe6!Ygpfbnf71Q(q z4Y=q=3Aw+>H|w*o!o7Pmx4EdfI3$(HENv|^&Q%jJtKjb7-;ro(`7<_6rvq_{_`{oq zTN$~l`N+4$0kX0q+7zLils{3S&=XC(eCdf;3?Y(pC@>Ot()wdx!0fD$HQNGQELY;> z;!c1qZ&pb_Um5|8n!vLoA2MfMBrym>$pn`pfGDRaf-~`Uyx1rL)qEhKu)s|8sVPG+ zG~a{D%eC~=mBMoYc?<}E zHgJQw|LVcIvyKHj#FpV=UT%t94%8RTdD~F_M|<=1$(RF$iZ)#`Hkm{$_4=kJ-&Pq) zB5uqJ|@Z-9>`w@&=HvSARUGng3*rADb!`JxW*!S-hLr10dIjj%W*dKl2JVbPVUn3sY0 
zDL4zqHOW=?MMQ*ZZ1TDin!YY5Dx0tu7kN9&d|;L_&A0ixoA!H7RypQr`HOvp6{2oW z7IRLV49G^H8%4AEWuD9BWJ0V=5@5}rqx)e&i3J(`ys$se-elh{xAKj(4m;~#8G|p{oOM|h?bx8dbX|UbR7rwS-JCypH>kPy-Dv&O*#7?a5GEu#kM1n zLTiGtIi-~uiPgnO83y<*uHON#YmD@pU%3AqnZU%!^r&rK=x2a;cTldO{L zVm)SLCF%GE*xWa&&-t`h~`z01^w*jCdZi zLjo!94UHPVA#mYko@9}@L4s{5$cElD9d`ZRB&JL-GYY=m5RF|54XNhwU&zXrN@#9= z)x$1skOeTw79{G%4b?vZOl?awSrz+MW$Uz#_PUM++)-<2IjZ@g2P)Xj_U~2T{p7 zXQ9@>L@N-V;d)|nwnce5Bd^3z0&wU!TqF0A707gt$!0>`FL#x6n|CEg1v5n~R-YR% zDqAby1Kn-~$J>|g@Nzfpt0!DY$o!$QwiEUNE0^&=a3rWCg5-gdzTao##Nfl`E9O0x zauX`@e-wzjimqb6Jn?UmR8#KF5Ns}uHNn86UM4B5f;chzE1`iGFzHMF%!FhbZEf-l zNwVk0T2z=8-(1|?h$r|MBeZIf{sAEdpwup->ZNJ1;2bhaj1elP8^@lKwcm z$pf<+e$A&Tw#B#4ho2;9#}F^ChIL<^|9pDvH^ z_g>Qh_3tn~d4CHMYN}9Mo;pVg=9E1B-TQCzsIha*bAaioCrgrXWF+NA*lIQGu66$2 z!dbcC_vzD$%*}{n`7|Q1cXkQ%_zfL(U0mQCsOR$gfxUsLjHqw|h3f}f44=KIaoCUt4_EJAM`jxCA z39i3NldEB_-5FX4!3DpBHMvb}vXhc&?6-5B@{n3yo|8$^sj`}I;0_3!FzLy6bOv_c zKpyWswRdFC2f!=-VWR{<#Pd*Ur2W;jnZofqzNnUy^_Z*`AWY9dPdg0?60!Lkzjv92%g{Hk%>db}N&A!|U?%iF^$EQ&4gAc;=;g7~oeIuz8EYGAzrd#QphDkkQI z+91Sz7@}Lxth3@uS|=bqy~Cg8#c}C+b}N2H1%a8DR$a1I^iThjolLCVG-MA2U(HB? 
zuknW6L&!cUEP}0IFB=d=0NZ0sC|0m)YPYxkAoD!N1glM?<~VAURuK~m8G={%JW>RF zg+zH8L;fb}xiioHDCu^XZqsGEOQM7>#i(^}8-TWV?nDgR$$i540PAZ-jJ${dVK5=V zw~?m4{}h(1%?38 zK#PM#oo{0u5%XpiONwy{&(owvo05V6UDk;g0%$nNIq^v;bPatAnU|V{1X$zc?>G#{ zj~ld-qz)^+<4n@qUTGxl; zQ$64PNEzyw>SV{I+`-<=ueYq1Xi!mCAvC1C#jHndm@(_>=8BEo&}ccfSg4jE%Iz83 z6F7z(NBvQsHlBysmg&FEUQ5o*)IuJ@9{gwJMKm6bH=`}%-C26hvougo?DJ++H#a~t zUX#1-fC-JBVUVUz^b0#ksUIe`j1KFOIZnmC4dyy>LUqVgGf(xEi;tI{kNDh#e+{7F zVwv{EREZj-*Ycq9hwi;-C`1@+sy-|y5uO%Iz2|A43T3}P)xS_rDycAD!%>bkf*%Al2c@JK!Ncj}NAPy2ckqw^e!Av9)?&B2q;K8`49 zTH)WWBFM0K>ApaIe+Qv~C&hEKZ&b3YV;YGUi#f8~m+RQ-VHAk}O|<*OAFvgv8|GZ} z=ux5jq92dsn>YG9G-`U^S1L+wK(&dwF;{@k=?$jobH|eD%px|6oif8M>FOvu{P|Vk z8vhiHxpM`P#){ak>QJIV%Smd7oq&wKB&KCHQD07Q2wv1bnBKu75=kl4ovB?Hi4`3y zEzFWLa{y6R%{Rnrebpek7aZ2C@Tws3XGa>lvBbNVH1Fmz)?5=X&8NZ2umAbYDa;j| znL%$d>iChMrdy$v+2q4@Wk4&we{yQaWE=T)je5>>LuW?oD+`e$!-Sn?+4O8HNdCJXgCO_T`+4X5IRCdUDBqIZ=}_$i^Ch zu8F4;F#bDnZG3BpNNOxGx*K+Z$X44ueS0^Zj5;8_40j3hWS+~PiaR!aXX|y0DU$r zB%ybEHo=$q)36rYiVi<6ZFo+Dop zL1@tQRbMa37k}glx#G-rpY40Dq>Otl?4;psIrX)=n7-54|E-a9Vf!tvPxgXLeGPac z*U(np;@Zqh2qa?Os@?1nAsuI2KeTZU z_*wOH?_@R1i>>lI$H}SZP3^d7v=97jznDw*)r1#1M56VeDq?{!t3(!6=W$Jm{U{|d z4WgqIof=PN7;Z^O&qRG|)l%wumWrmLIO@3u5NB>Fylk=6`@n0vUz(g=ASd4fu?`WEfv`|=R~bbpBH#afzA&RO7#$1B6` z+iR+qYXhY5H0NK>nGbNF5tkk)*Q7AP9&J*w@b$P>CEA(QKqLOBmEcSDAx|aDM!Xe)DN?a zkvrFhP^q?{hewga9kGH?_O(&kl^B1cV;uy7t_G<0XwocPoyx#Z^l{_T*oA(*w!d7$MtRdbc zP1A-3#SGM%Uft#6&bA(VhByn2VshLod&Hj})9>risbR>rd!sk2uhKk8xZZ}ki#%Ym z!fMkWxjaKwsxqJ)@z)E2Gfme~%C1Me>|!45wWsbU>5k>4Cw;;tP;H42X1d*YU^O_h z5zQbfe(;K9A9*vcrn1I@c zPnjpSjmux}?rpl_iC)BDTd*IlY(_g(LM6|B3;L=Qp>_1f~`Mkm3 zfg&VL|9>H}`{3rN#49mB?#h{|7W^*dOp;=VtF9D5P|{NSzDC5Jt#-h{k*TNWn`zrZ z{u5A6z29EAq0g%@cj*Y-(@uleYsQMd4S z(>NmpNK_wY7fOt5wp`7*yTC~QoJE_(8kyV zQZugD-l9;!LFdt0pnXlc)gg%gpwV|#P*nJ3(tY>CKNeau6l zLJkkmgZl+VU4Tu7y(2k$NJLhb^0=TN#dD49#KyJ=zx58W?X($$hBLJ%&lQ)`&s+Z( zI0$?j&-&~30WI^Sjf3+d7j%+$;yMUIG&Vi3c~v}(JHy(lG{&kh76;xcEc-)BrGJ>#XCVy%+# 
zGPX0eX5CBU=rm-iXS4GZF-F&5?2wa9;^Xx7u1+IG3S=d7s~Uz8Y*=yMMdm|~_eUmA ziaHkPJN5lQSbL&YBPdYoniZyA6PLO*k@oC{pZ2IDj&W<)v@6Bjr~N5hqgG_fIP+SJ zUrFXZiQ>uM|COaf*&ou01CjOYUL@E1%4Jt5xwGDR3Dp82<|InungQXfM~bOa*ZFUH zC}~U5S4$5qBc7t6!?r;LieAK)m{@I80gW97w@}VcwS?>y`1c*jIT#O%cis?*w$A*v z-Zh#0_k8W^++^dsCrhD%Ym0PnW1d=H(r(fWWFD7M3>D?zGyZiidX4z7w0V!aG`wl_ z^x(InX5`tyf#WyUdPNk`zyDFBERf{w&zMTJL#y6eKD@B0S+{ zs*l)qj#``VdGAsJrjf~Xx~*oy%@rSNPyo$`;>&2e>1bv_0?63ew&4hA3itN+aNXuj zJ4hggczAu!4Uw46#I5tD1q+Io2DS%mCo3Xa>`$&jagNx|meOz1y+z3(LAUQ}5ZZ6J zW!ULw&)UCkA!VLCvAQ@ocldRe(n{)rit{Hg>lYVT=dkzy(6UWtpOCLJYMfzs?~VNT zHF^41-&@VIAZ7P479CC54Q!L0epsv;N<`^K73}`mn8|Jl`^*RU8`##joFcjD(i^Jibb&xRJyKPwBOmGXVzGTt<97cL=ixu*1{3cnOu9Z#PP z`LWPxQRAJ_sc`wF;=jk#Lhq%yfHvDpdZaaLya{m?Rb`xL!M#+O4ki&|K1dl!w<)g z+|CW7*MvgjxJ`aHM)2(Of!(dWI@r7ItHBpS;Nf>?Or-1&f~2v^Npd!$S;npKjiK9S zo>?L_%*~LyiMaOA^`+8LNfy=#b$8IUb!`dFXsvM0Z zg>?0NCZ~{KSv~6iv*M>6fb&5+d(}WsQV*i>?jU4!OX9Wg^i7ESPux4TJzw_c0N6p% zE<~p<8^_JX_snS-i#GJWl@ATs#ky&xZUa-`>nEo?&E7KovcA|+C7Bukx%3vQOo$}& zT?IK-5$<;SWtX(q_TIyBHbWa{EoP$4(8=4L*%p_FMF-cW*kNHv_KR8n-y%fN=e7M) z10Q{*br~*?Z)LnhrF6ok@^PDkyiz;tPRZlOGoKfgi6tJig-|K6oXD(k z!TuY6wmyVBdk}i8^nnU^0JmW6kTCxNZHghM_Kw;ji0(;M+oKtAorWwWk$)j%#>dy4G~0Z$+XqNMu11woA8LskEiorTTzZz1teE?aEqcoon=smK2+WSY1M z1IinBChWl#X5+758F{~l>L=SBCThvMap%qRMLEE{-@QzF`@7#=;^7RThdCr6AtBb< zydCB_gda&Kk{`G`+AZY&SBJ0eh!reFOw~ZHRURT?%hYqBlu35S-TF|i*1RLuH5$@s z;2W!I*sd1ENQ%{8PT*SEi7{B6oqA(es2-S8@-I zbxyZA``vv!-6P5LrEQ(^`p0|!77fKie+i?)PL!)Zbm@5Y38kWVDAo-R*2Rr;#}8qz zfa9L|N`-4^t%1G0f$ME_Xgts$k}m8~8!iM0P#v8YWZfRwk(~eS%|gxSYd~Uckg8J6 z!nWN&P5;^01>ug2`E+SO_YMEkSy{))n5dLHE(~nlBn2i~GA6>#2A@>h3Ek}`6R+{` zLN@qp=3c4lch@?BN5S`4TESf^TM4O|JYAjhUz4gmyTWM4n48P#vRBvzID?c56L_9m zE)BPK^^qpHX{@K4b2wUQq3wij(_un#7(Cwig zrUD)&fc(VxDjpFSH!Rk4d8JIoLRaPeKVA#(e{ot(2`( zV(j!Mx_YU-`l|&=u!-4p1PEy4)w!eJG-!SMhMNfb-rm6XHULa{c{!))buPktUPB|2 zjtx?cWgfyS8vh`DSABTNERPp+`)$XY`ly}_+HKc_H_qjpHp9Or2ZZ1Am%Ht&hsF)A zeeAaqwj8+$1=jX)^lS$e*2$J7O#GuOtjOchnc;8r{Pr(~#<0PCKsF}+NRKeR2a?J~ 
zIxc9hXBxMb@@_~}wK%n#p5HF7qUno9cY2*Q;fC%84i+ZqwoIRDoI`B7YR;*doUjdp znNB5{{=-sbXP8N>GT*EVQ{~VQ+8~VJIM+qC1U8d+i;=ziZQ$x@TTS&h$IqG$2YnHA zl{+Z%a*N+DEb?9)>sel9YJ92mn<-oLdG>TIF+^NY)lWM1r+aCybP4qwvPJ+)3krD< z1MJF79_IfLAh(>dP zK|{UqScTAEsUzK>63ZynRu;_VxxZou^$y`&lkL$_oJ-CPbmH!`jCL8dGn{t3jFoq) zHGL{Gvrt+In5`LEY7P9< z56DQ&&0R|P_YYkx`9Lk));~QrSEC#7Kr3vJ6Q7AbS8ii7un-hD$@(0ywo{^&d3OiO zat8ou-)qr0PwqDUN1vFtQ&Ve7A8nuB+p}u3ld8&wy=j1pUDF*iKYoejp=WtZ{8X1= z(&LDZqn(K81f{*x^=_+eMPl+9b*9(eYX1&uElV2aj?#s;6*T)Xw=1sgPcUbiw!bSm zIulera9uE^F~1^!R9!d1vqz7c#JEMVp#Y7#BdRdZALD}Y$fZG9a^HJJR`4UR<$wkuJpoC>rbF*zq zq5A#wSRwYqb0Y=Z9O*nq)~h{?IWKt&1^GY z{l8fThp*ZF<0?Md)4hXnPn$zSlG@X~X8?EF>*)_zh2DaYKURvN-z@W9WC=#eB9{P< z?XwCvf_~~nHGBOV6|qLp=^I_j#uhU7&&55vlcHT9S-_kJjPFoHdsStPd-~xKz{s|q zd(U|K3GHt9I`#P`#s+5X+Hl2 zt;Q1VD6a4#%E~sjWX{q)dyqDt7UxM)HmLOvyqOA7`)v5SpXsOQL!q9~xZZ;GE)I)T zi_(BK)8BD-GxufAsu^>O>KI3_{SmlgfA}giafXN2=$`xBq{l~xuXc-TUjj6MM)_U~ zv;36O)TMD0uJKzP8(dd;;?+DBmtie3=xqvcl(_G4I3_)%lS*v+CHYQy+`vm*p>}jR zLYtXjK*Y&(vTfE@@7cLBt&O4hcPa0?r*=yjv8>oUY%um|oX6x_bQ##|{DK+`tJzul zz|gCY8h1yYvxxaD|Ky8@IvB-h>9avD=^+ZKP_bS-QlZx(Bk9pklwl2KYcKi#22Ri| z!XfBdb6N+~tyB)_3@X?nEVeIhOT#qNe%BwvdPB`iKCc@l-r$izS7Y~rDA2EK*|2YOPqwG$jiTd4mWgX4Qw%ew`TPS64vhz-+q~P^YZ$>jJd{Zh{VQgZ z1F9+ycNH0!6>vte!Dpv7npv;t_L3B4d_kp~iv{|${HMX@_pwLf7kuU9Lc zdjCe1K1P`g^LC&tlC3v`iVkCXY_boaYH`L#f7fUo&42ydY&qa>xtN*{^uoz{$Bqd~ z;AA7jz<+x<$SZ1e5v@tsSUoL5oMmlv*Xi7mG3@x~L9Map<}UK^ z-I^K;2d{3wjhPlvA%lgOG?Bg_=e1Aht%7yX+HQe@es7@;KytdVHUl9CE}q0)WxvY! 
z9Kmj6D1I!#mr@q}GZ%?!BHB2(V&6vRn^fW0r)g`e*1r6B8`Eg=R8woe@52Ov!E-p| z<)`|9f4cL{=)IH?CKSiV!SQN%(NM3p#WBk2qGa3O<(dLMk(2e%kA0cUlIiZKxTXgl zR>uvYk+6AYu9SgLaaEC{L0%he>evMh7zk9cE*t=E`)`P2Fk})CH(;Wk6vD*G;xjDG zeEH{z=+n4{K|i?&?#T`D#+}b(`+1Vho#^I6v43MNYm|JkGcGT691l;yM~e7W#A%|z#sV0M zJqSIZ=09s0w&{ zS@fJII?vGR`&U(L0iLRZ9mI|W6$jci{i_ihvv=|~ylxv=6s$R}tuQ02RsTd9RPXml zq7%VGjk`Gb8e5xgXMjWzxBE-*Dqvt@V5rF7*FvsxaPXx0kpP&cWSM-9a+qHO4Ug$P z#ED3@K<_gyf_DiU)$8H!c8(k;Zi^ zg{Z*sxWIw!WZTlQ+}?8d*N@Hs{|y?~YLCO)EeCP9Tjo?nx#El*dd*s}zo-k=y$Lyj zy-_iN3*o}G>aaqw&6^{($0G_jhDm~El#uhLd}RH0L?u_6rl zIStq~8=SIV0-+*+79o7g`4VmS+ruv3|LZ=+YBMdFs^iRO#Fs$}L^u8(JCu+_A%%#JU0pQM`^PjxQp%keKSSIHB zB3!X!`Uu6>Y!H}lAh0t8+!V`_0kBl zT2VomY%6&{O(ylvrTL&G%j9ym2|?`(4)fHz*2W+hg9Rd@i#>yJF8smjjTkwf)=y>! zi_O&xj9Mwa{O=jGb| z^{n2+Rqv*?o(IqPNwz$D8w>@FHv!d?gb{~1scuDQ4a3o-WGBOlCX1YIa7iaMl+?x@ z^E6=x2Cj6!s4pwiJzjl^&Q1W@DyGxWGXC$51N-VW?w=x1>iz1WFr))sCU!!txJj6)h|Kd)LVS_g$zekgM4=oVha3&&&3{-`?Om6u`4{aG6fS zZl*S4yu)sK&jSyQip?Uw;EM^I>%B;fcJaD@&JWJ)0f7=tC%wV?1IkU6v0x9S@xxLv zA&ai#9UfGL)9d=prOpY{t0x_gvaIGh6fo{o|Jq+t8vCzj*uLPS^32IqR2h1uw>Tra zEeK&8)+I?UY#19H6fpVmY`@O?^hK^!KA>xe?UL=)Dw?^)iR>w+n$3T{01@>k`}C@n zI|If@)V_79ID`DPHov@JEGrluC;v@P`2}-MX_%rD`<4%k3p93-coQxk25>C_s}_S7 z1Eovb&Lm9M?$*!2h7oTv0wnLeuF~ul9-m*$aqdPH&6im07n>{vMY1~OVV@X)dl9hU z`gRoB87!$$ECP@XceZ!oPiZSUHjLw%l(X{Tg1VW>W)7X3P?a#2@2)xelvcLsE!5~* ztbQ!a_QA{I|EBs1L!F^`rRH+iDwST3(ff{^HonT1CGprWgVV`Bz&}eQfK~s(yb=VC zKtdFoDSg-3gtVGMyeEv==xR71eOU4jC+p@P@Lp{~XqzVxTjOb59?Du9Cvm z04!z?iKuj=A&xts!Pbg3n!*|v>fY3B+u2yR;Ymz4C_hr=l$K;1y|bZ9TV_W;dIwH> zRq6MMPv_{C-`WSuIO1~qnJqJ7{~2{9r;(QDut2h6^afZ%&|(oTw_~r8(55gvcH;|lLCg`I$KMc0;x-D2qJw$r$Jm|U8(gXL z`?sDE2hA~B!lJ&Ti8jm4RmR3<|J&0wTTC;k*Uaw>qqEc43vxQJuEA8)bfA_K&z-l2 zPNBo!5f!`iBELo(3I~Sw^%{rEQ)_?V_~yb7l8G}eKlln2SePelj%DpnmLa*&TK;+n zSD5C}i=%j(XlP?7{lVq&nQV}Lqse!vr}?^jOI6(3#gUMI*6I_IY79Id+DG`@kfr0e z6%Rwj!2Mj*vc5gf^~5PR2}a#th0LY;FPAjsght|-r=V>8;EHi@#hF%*Abvsl>hW+< zX`*~?n_lR95%!;=g|4>y&+YNx51vv?FJJwJO8D5RR6O^_X8IP5{@WQQ*k`mVL>usf 
z3iE&`o#w*}FQ0=;M;Qi^uX=@A>8FtYbvqT2xS{ora|4gwj@qBaOvN8qj^8G;S zaqJe{P{|C1zxh@k5goC?#5wn~exwil1|||JkPGiQ1ptP95FA8)tj|FP1r;6Tp;}Q~ z1s+TEBm0Z;d=pN{;Ah7r0W%vc*ytGy2$)+0W3iRzZB&f5#2zwn9SG&}qlZZQPJ;~_ z@sM+7=0kYGXxU7qQWyD>hl4k2+F^sQN^rG9ap@YdtL%+&;DhAnd!R*sNjiurBaV%*qR!EIxOJ zTC0+v=#f`8d00d2Z=35365r{v>XAW&D>U1~K}j`mBa|t^!{oZLjxmIbDyVX3@(*`W zPb6mVEt5aTa_h}M@+k_|u@0|R!!v7r*7Zc5a7_KP&Nm*Z_iXKl+44MlhMVN9&epe- z7*dt$8B)wM@{*-x8j`F&SH&AGV_e|Lzk9Tu_BB}NRd}^8aV^1cxD`U$RkN-C z`!^12#=K;&*24&$<$|&M^23== z;_fC(NbZd(XHJx)NwXU@;=!;S9lv0j%$-9iA_JDxa0UMt&~8GiCa0%Bs0Bw-@zz;1 zSi0Hl^k>A`(r&ap^GYesrzhg&Evq{J*jnjwRVfkiV@iP>T3;A?Gci#6Q`VdGUtt-0 zkPSQyW@MP$Qx!4`=^tW|I|sN@_OiuckIWAzxHvZBT7BXlZBKs%n=LWkW34s(?MT0Xb6qdl9THKTQ7O!xfrm+wnsA<|o)MwYS? z66}#z;hlGM@wqukek49sF5%R*&QM@CmNKCeo*bmzyy-uN+bBX2RjCUO@NDInL2e*? zOR!b;v3UH`XMUtfO*-n&;@l_^jk-AL1zpTcT!#yFsL6+;-(# zkFs>|ci+L|5VY?6h`za-{V3Y%+UQsFgpVYx3P5tNO%~QXbq50Z=+5?R3>1-8Y;tlP zThFO#-za^t!ugUH(%?PYemX6cY6?iV}kPK3)-z`z%FN~N`fy_xR`Zp4U%-#9V}wl6PJ+}>)A zklX2AImZ<1m5AzbbbT_||D97|SRyi48rL8_T%lSApMqQAbA&=P(8pUVmMURXJ5kW% z9TRolO=@fA8rY09Nlj`@7Y3DERYUWKc15WQwx5$#?FejxG?Ot z-syVZnK7;*cH_kW&bUNmkBN=yqoCnLdTvfX9M6wBnxS!l- zjkf}Nz^u9TrPK$%=ux$P_sP<_*LR-~rdI!?VzH}3m?0NS-@J4e?30LOm~sfJ;V`4{ zd&w5hrKidluLLhhIilfyONmgYZqsq&BVm`j_}CM9(p9g@ynL2JhdObZ3}-gnKI}wc z!xd#GPa@PKko`RQ+sg8L>`bH?crzr?qOq*Ol{q6Tz}&9y^|>e}RT*k;O#W;hl?Z3h z0}5oc9%BqpzI|O5UubLj+KrelwM(97#Fp_vE*0=oZ?LD>9cF8MOC2jj_T-(Zl_Ol~ z1Dk=kiA^k-RG|R0P&3Gy_eyw4r`Hv} z1@{(Z@$1Y{IJz z$b5-DmBWW#NapDFsU5ICKd2AV<}gDv6a|N$yeFhYFhqpOUAsqFJ0lTg(Jd@|=IN`R z+SCv*l%rKfmNfJ}?29-A$sxOnhAA+%d0?VP6~1$_GL@AMEJ6?T;&TP)GOpec#?Ai6M~9~&YPPzF?q%~Vw0zF1Qc0)cg1HMB6)?zdWKhSGT zx1*~|kgtY$T<#(MOb^lyulSKj)#a>2I8`R_FN`kCU3(W!+~>_%p2$5W*bz(AC`b`? 
zJ_&LxIQkp|fwsswf%=cdHAnLQ5IQa_Ix}ovyl?ZO6_Z;DXHawb>VMafZsV%GJn1W8 zbKNfrU-S4d&g+59nQr6=vwTn@?Nnr8Sh@J-3STRfO+CFHGLwDZtcbl4vVGRt4HuFg zyPqotH+mUFE6^qqLG^h%+Kl^GprsiB27TZ@sKkCS6_JSNZ^Ma=iEb%bknM}Ucl!NV zXp>emSwh*N@HDWGf-Z7Z#}7-4$n%?6B-IDfTe@9pTCqJ$`Ix<6YS=-9I+bxFdIUff z9%D1t4BiM?%f0)8p~k3P*@B-RYmGB)D!`6WKJux{*9#7$@5pTxd5rb^bPlbhjg`hN z2Tau3l=T#1ip|tw75HO2bK@tnUa2nIWg*NF14H)5s$evwK6Z zTK*7I6b{s4$tZtbfOal6=yY6Sa>9eCNTX676-Wrtr4MnMA^7#e$hoOO#o!fQip}Hy zAkw7E&HT`9N8~*rp%+)dVM&6@2Lcb8wnr|tw7l6Ug4;;se$(!=n(eWCkBdFAdD~Wk)WQ!Q4IBI`>*Hgbdv{& zVV@}2RsI6yqxlK$v&;hbDa_UQN@t#-aBlwNrwmi-7e-o{U8RxRO0I88s1ktIK7l(Q zass2~NEOQxUP>MKKQI3*P7!qnO(A)p5b1}@j?_yGB|R&OOE2f={2Bufb-Uj#sA697 zgA;iT*b{%0+)g~|Ady}=?pmSxEikIJdMMt>f_HZEqz`~|Wk9XqI^u8S^|6ZYBA+xh z-g(5KLNSHWC_eeqzW{FA4ys=bHc%MYXDEc`rwJKZiuhUA*DR~yKYj;JQVrZjSxY5q zYp3B81A;-)N0-i}di_RikCgo1E5`5HG^v2LX@T0Ut^Jc+|7LH{N zd;85)uwp$IEPO(&PhdX~rUXrx4{`hoe?j9$1v%)fy&ZY7DsFJDiF;Mzu5U@88}>L? z+SL3PXZc875XPa_x>lkw-Q`xa{|P5*HpKBAm3X6^4XToYg^84rK#JHpC3|_c*f}nM zasH82h#Tc(_d{eVaK0QIr#$b*ln~Pi?&DVs3P)w>8@9i&n9KI(z^$eP|@b z@EPkOx{pFoD1UsRoaXkY;fgC(&M2a=F=w6>#AniS3MoT9*@H< z_RzOrw#HSj9@LDi77AVYj&bmTA2)}wR!dGlPiw>FA-67k<65hBkxTg2Mo-cGL4)EP zMiL(Ems+vPgaj!YI$4b$aj`!_W%_picN#{R3(Ysn}W4D=kimh2PnY^>p$TM82DlPiSM5_JtA9aRc-ZxAQL=Wgysmt z7h@s=tn(4E$T}G;9#iV3^-qG_1W8y~RiroK?NaZvHdB&?DPmq?s#gxO#`8B}#zD+m z>ZR5zlUal5@DOCK8q{?kFvnr*qXZT-EUXxF?p20d$Af2C?Ror(Pk+72V0ZW%cnmKp3sL5{lME~FM{u7#kiN(ln>?rG zk%hKa(Qd>Pw_O6Wz_|5N17{fnqbp&>VZV#c2~VDqnp&xbKOU5^BogtSy{C%2X`QUxC#LGSX)>mNM09*<08NSo~y9M_RrtwUG$CF+mOAHQi zTK(n6>X^+Ij1Mf>?*z+7K9^#+uIl}##}@z?+)addlay@5C)wI39Kf6HFHkJ4*`O2M5Xyeu3QwK0+VVA7rd!MHh*ViyrZ>0Eb21zbnHu%yW^?E28NN zoKB479(KKhX!}j#Ia6I4#+wthD0s<)JOt9B1%}-N*z}U@1PfQ@dwZuT!LKc|1UD*# z+5RBdSXGdIxhJm7+>scvig3iY2@Rx4%{pxpe_s1DV{K@>H!g(RpmB&x{6>QH%0XuV zrqb6NK;F8mfsl0v53Q7Qb?i!9>)cGqWjC+*AlLc zwTT$Erhytv$OjZIK$*sb4>UNoVdpj&v!!N4Im!C9U)U@Yq*(U-1* z+OuRMsk2mHX-79ORQKT&X5f_yk)AL*=D8&fM4NMr7MX{iC~!IIu~K{ekO=Qtm}t2} 
zO_xrV4eIKZr30DsoY20X$Pm`rXV{$+2tQAXyoEeV6t5OeeSh7JGyYLdpboIPm4#{> z088ASU<+uG1ZW(LqQ%SGl3C;XMn&z0$basRNEAH{;v4uIr#PryF`msXi#?ZQ`*LSsEhbXg3B1Xl zSfcL=$dv23*ElTSJz#c@T2>ZyRD_F+!A06WF(o}XHU)8A{+V*SPt4iHD3er(>sO=x zN_aSfy31FY``$j=^#=3L`R`LdErRRT+zQIF`4U4V1Scp3zVf|SpclTJ9Ndnp+Lux$ zU7$fA-8v0&X56z96CU1jkmcyYgfb|$XA5ZuTi@x+_fDBxjOL`)?Fu6J2Q@vn^1p(y z83F^_424{+0~~34{o(epAG!fX6Ik-V;T~3JkqWtYAgbiVEV4Lt3nJplOU=0M!Wpk& z*jgy@OOX)bN#*Q%Ob{@yX9(w7*C)6zz|p%(H3*=;@E&Cl^coE9x0x#O*O*G<(JZ-F zLU^L`zrdiJA1>htmta70f~S#Z?OM|QBI?$sp20Q|GVC>uw5hD2zN!di6|~;L7{|Iy zirLg}r>Yj>qt}Lssvb8`{va7=BXA8dAZbbKD=_Z~*U8mu^f~o_#y7 z&`(OtL;1|^LZ9nbxNQ4p=vaM#j6zEwbH--F)| z2em7R;)ACwZ+SPK;I#EO*kPYvwM~#dI4j0)yZLg!oQHw}<3jX#izkt}#%Au~hiSWd zZgT=*r^6BLT=D^oW`ON*DmxVk?}0auk%>dCV?}|$-bdAX`19Z9tn=|}DyQ+7%SToj z|67oD%CTl;#h4-gw~Yu3A=1dQSA*59y2ckjq=)paC@Ch#m2&1= zJQx~`Ac%Aqnv#eOC!VvSEbF<{4SPG1ca7a_TN0vH?(6Tl!=j9Dwd{u%1$n^}=RWom zKDr=E60C6I%9(99){`gf2^d$@k1Hp=k*A6qkqpYy0mtY?M!Q2_cWfuYdNHr~{-)Dx zkoN4sl8*@B#1!HQ#+`F>#Ek@ETLhx4ngWX0^Z)&4ecKLsuxCV4WZxn5*izKU=$e4# z&a$samn?@&7iYYrj`o?$+;;oNeslyhtLDQx;R$F;f^Tp4B7UahK#njFBCqHA{dPNx9xL6em z;XBt+8YA5ik;)7DSm|%W73#kRE8F~#@a=8z6ssgZNp>TZooIf^5oAJ_xw&|BxRC!B zgI3fh)=6j=L51Q2R@3|rPq=iCq;x_oGMso&YJ|%$av5;YG8FHym(S#IotJrlU_9&N z=b=}xEP64S=Wk*Q^+xg`58ZmJv-HtJn+w5|32nqLt3ZGxbW;@2jx{}?$j8_6d$+*M zGI{2?k+!VQRmU{{;d{N(aK_o~lnf%5$Gs5RE{hXXxndL}k3XXiKN*CGt3S30cQ?&AdyeN%QQ!}SFN0y*z?~!Y9Y^!^NJK0CS2<-Ha4t{x zIKWC>k(~n4wB8ep7u@c6$Fr!s>y1-e$~Rd(0`o1Do7v4M_$i`%ubjLFOC+A#1yG&E zCi!B(EoJsKU=wjPh|~gKz_q`Hn<@Vcl!5i1u^FVpV_ba*zoSA1zawGCmA`h;p^c)|xbgEq42Uf6$U zP(?e*#~fThRCOR~Z>tq#r6OYwH%D4fjRy>rT>sJq7e3XwXM+-{4^J*?h(g_&q6)lH_QWrlAdVRs&o0t*C>y{>lmC83-n6J7f#gKlVGR|B+r?R zI`laaxyHwA6_)W$y63oEp+J~Hc6Ta>ymV3`YI$%u;@KPitRc}$(T)hBe54ool}{Yp zj#(u!)z2ZCxCHf}OG6eGizQ=`k(02c*OFOWqnYWnqvb)vNH0p9Zn7jss?MU!LiL7d z=YVA*T%&>Rmk3(Ldc3Fv#iRd?3q1}kv%grviCTi>pp#d&+mD?o_~5( z=e|GV`dstlPnd*?RZQ^Sg2OC)Oxv*dy+tbII?wIAo!tJ#2Oyq?*M-YKyfCg5N17qrd1 z%PblFX6mY9s;)RDIma`#C3xP@X)ab}GIA>Y?XJV!fi2LR&V3i_m_focfE2BY6uf8I 
z;J7>c2Njbtxg~s4j$=FzspR=whU}*7A~@Wo^}rekA)v0vbnmLO*@6eN2?xi2%8I$fq8@TMhN#@H3U#a*a0n2L1d-{P zEVgyi48J*AevZdaBbQXwiMPA!iuq|x*Vk2L*cfTs_jSJi34SAeF;@BX*%QR8;-Ov1 zEp$RE{?Z}U!=NVF(93L>AZa653qY0ARtfeYYW@9#%^mjhK6SRE*qpRPKlytxAEU`s zt}Eo`2X^(O3C#IuUVp&xXId$ZVb2#~eKkO*HBix?BT}vyt3;H{pn2B@dKOO*GN>KZ zD`+1V#?tpRta|^MukH-R{#{ng%SfihsdhU2muHUbssusr2RCpM2=#6q3$&LXeDNzIKDdJzmQyT$W_NZFBPvmaG zl-FFId;R*RCCb$?Q9VS*>+Ff8ynLh8mT%v_(20cvu1&?bZSpC{Io>c#!x$i3JL)pOIevG6!k9T+ih=2&=R7UTMR;`ejzhLiQ0!gL{Ru)}>C)UUABEo- z$Pu~g>5)_fN^6VMe7oC#`PjZIBF}HKPj;C}BC&ZRH1+9{R*QV-;M8on_r=_=WBPH@ z21$|D1T~vs0|xX{a}3RYIMoO9Tpp=l2QKjtmFo$>#z-5PE;Z|UZO5DoR~}r0FBFz+ zEvM>QuYnk{dCBA+{~2!yWMOt)(Z1o4>VAJlp`mJ7x8*s9%G`)RH^-^%?bJ{zl@g{J zui6FsGN+}l_9A<>QAt98+p8+qv0}7H!Y5OFWc?73}7tFA-ivv?>+7AYP@b-bNh~JOv4vW^{*eVKdFM;=Y1>nuXZOV zoy#x{D4&^`1HG{gaq<33lw|xK9pPLG`52-xZP0*Ohz1w(SsT2Davtk&b>$}0t4j`#cb{w7Mcvi=>74sJq8Q)~owLk!^ty93Fe>2}izTji%uw5{;pKYE&^At zWVb-494230k7`iDQz#j$?)N=HDM2)k9;< zb36bW%wPS_?n3$oggxcpa=LY2YUT!e;1qM4ZT+KR$@Xau@-|j$0|U9{1$(i*&f_vh zI_Hv4HtZZ8Q2}yePZ)0phJr-AhnSVZ>J#X^vzcEE+JawvyR2e#7{~U(;qXmxR)Sn_ zN^@{LpKVU-vQF62yn$R(WybAlCE6=T%&toZwiWS==IFe*cr%Rx!}b@2+#YWjGc>yj z3`s_Zi+*o!8P8Gh&@+R-?>-F9NC26>#dmxB`I;;Nu-%=4?%(|0g(lv-AME5cEcJb> zuPl1q7cKsh@c}tE5jSXfUPuL*1Q)~)a!>RW_IoIs!`HhEuUrre>{3tXv;VzT=AYIf z_VK~H%PWq~LQ$-gWNAgso6&R!@7YMWP|+v z9j9IWl6itW&SYxa5`c_#N9IXyc@2vFhZmYNm_dPEfSAD=d_S3U6#iIp-RJSO6mimC zfq8~8!aL6py=}h5oyH@aU|leKU*nM+BBjxcT2zyzP6=iGeRKn5c!Rc=n)Ia>ot?f? 
zd*`Dtrpt>9DXs@M3v;w;3TavgN*wOFylyJ}VP`q6f;N?Ze@tu?b{>3!cHEkGVa=v^ zj(7*EGSNPfgZpZ>+vbvc*aC0twZ;&E+`tr2|2yMOSBD)ppJgcjo+qA#H48${f)fO8 z7a?#<#}9>Y_D*v8J=0&)oWTh<-#%|lY^p1fR~NKCV`$Pg;%V^0Gdx`6na1!l;MW_><;!-rX_*9b0|^~4Y|cyb_HJ0Sx&xXXc4lHOB`ncxo(+P`4+dU88vu~&&4R7NsjWKkTz>Hd&wI$SQ zOJGo#sZ`IyxAv-%9UWU{^CREGmo0`J67|_$|8a%rm;#)3s_-Rndi9Cfy}1^-ZJc5r zT?|2uA9P=h;j?Jd-%u#W{=JzuF42DN*hMjsjV2+!IviZEAbUzqa_?HSEIVjmaD<=5 zCu==a`Op-%!7~wXiU$!RD+2tHhvU^Fp>0c{!OG5)0BjH13I@!T7Q4Hd4O$aNeHT-l z;P#vBF(f6BC#e?rPjq9(>)Luc2F#qe7@YJKTplTqHlEy!W;)2Zf)6W-f&go<#N(Qn zM$Xg578>x!{PY(4y9&%W+~ecp*0-;4RPI^0>J?21mv3U&kN1{^zTZn*z2qhI>(a0q z;u*3F0aIGyzr4e&b?$YWQo2z2w(x?!C&5>26q_s4QQ3=ja%dh_Si!?f^n}ssAq3%Y zWAu8Z`rUcCZMfD+!|Ml*JQw3*I9K}P2yL}}<<@@dnMOrgR&NQ7kWI^XYvQ5V7CQ4o zrLXlC`|>U7Z5%T7>pKK9<_B^UvMq)c>>U|b9q=72v=S?sgnsnfhkWdo5=UbOUT2qy$ z`pJ)SB3U(DWKr9F7}jLqjz{M{pmZ|VRY$4O&whS9T}{~7_~=s8vIsj74t*^`CKX^uBqvd6cl!(hQr;C|}v_a7$Dh)E+(&u8)1upo#) z-V+J2{w%|}kGs$CmKT?iQl%6|l{ zU=R5ux5o-j5cZ%pQ23@3uVO>X=ak1hr?QqtdNYzePc`125KJLq4`PS}!4DSgR_ZYd z=#$tJrf}c60P%DP@a3PO(x|ZzJ(9}R8p)q@4KIKZh7olRt0Qk?wpMo+uHnCmG!tl8?aBla;rM293Y{#x=E|$yocHGuev4A3~_r)hRRWJQotH zU7AZV`BMob8a6|UJtfmVY>MEACCOZ;OX8j3H?b38uE26vuDbLBW7yrb(KGy#0H1+FBGG8`*K_SM#^%7&DXrnX z{uLY8pSIwldC;%VFxlitSSw%5gOiOzh){(34O^y^nRg_FRh#Ww_X$T`7+lEyuauZ^}5 zBuVj9>Imu0*!PTOqKw_!|Mi+lA@Hf-o8AeNSkd`>|lv80c%?wAecT?xWtCy2K85`+^_jg9^ z)2*hLtWnX?KC4s^KMVl-{dW5$>P3bDQRuF++V;4eaNRNC)_D;xxQGKqERSGu3QxfF z>s9wwt3y)vY~!$(op9rL|-gygH|uTYXrI5RzZ zG^0$XDKb60JBm)2+mnkuu}$$}>ks-tgpG*Jw$qrNBY%<%;VD@fD_{E)oVr{J)AuF4 zuopNV11BUGC8Ibf8H61MP7>0I84?3J$~2pPsc``b@ZoYD(0EP=sW0nAJagJP;mowT zneu080aZOezA(9YELCgGkf!qOe76RjWoVWF`nan$MHh+eimKS*6fRV{Nt(vRNv=ri zSdjia00n#tXz*ckR+Ib+6ZzZQ?|ub6?(fU8dpT6>N~~Cw|LuXY@QZA{>w7NX{+J=I zL`p7r26Kuz(|_{;Ea!q2H$-pxt2}3=gG6qcFQ*g~SSs|#i|jkrroo4B3gq#(#rA0@ zlKT^zzkJL0ZVmb>BAo?WR)#Gbj1edol3%Qy7EUQG8^{xNzkYZ23mS7zN-;Dm!8j&6 z@`!0S1>iL*PIBN@kBKZVGbnVhx8Jfp_T;Z!bo7)^k0{QT=bEuuL0uip%P?)~PmIyF z=|8$!Jm*a#eAIt(K0gKI@ufKt1W_@rdiu)louXdUmuc8-t~2M7v#@nmrJ6J@M9q(v 
zezj9PqcA?x*zw`{xvW>CsPbgcu+ehxdA(=A?D%eN!gspLjuMOy55jxrqkc%E6UFWu z5u(U-3hmeayj%2hKSCsLZ_vG$88s-;tEsk)Ql+3I9}$?%bf>R-W7#|%G$Y|Txwrkn7#lwXO95#;gbLwRGxn3I(yx?CBX7{ z^8tu>-Tqs~$mrB@;u6m2Ia-U)zgz!uKh8HEC<+&8DaygjEJ_WzZ|2OoObDYg?qqu& zd~M5SLR6-&-;Wj#2GyxZV`jLc;ixXlvTbU*CpAyabMUlPS|nodMS$o8?jt=7lMYSL#&V)~GQarLZn0&=G%P4}6QOaR)K8)Ft-gg}w z!m91;PdV3`(T3Snc(bPDU{V2|9NH?|txmUW?Ks`6U`}KLtOHg+O&G4zwjn*F;a!3h zt7NX(6meq&v+c@BinC{Zx0)cQZ5r%!bQ((IiUQa zMKye`B7#vbR;cTTaclJwl>+mYj)(_Aa=Nmhvc?(li6O95NK&0_9rKwy1=>|n>3Y}q z#%423`trxviQm|#?9$i6Bp!{$c3Tna|$ejSo9ZfgcL z35g}$qFJx;1e>_ou<3zxkX}z%1uH8S{MMUzPr8RuDR{;q>w0BRXvdH1r%>qQ=62O} z&vBP9J;n-$-$v*;Gp&pzMR3U=_aq&at}_?5G*oFLw@-VtAB1cVBW)Qk_G8}^Iv?OKwf2N-QTg@i<^nPIDp z%l4P${+E)r*#SP$1tHC4f-y@dQj$Hq23LbfQh+~$?rhjc^dXl1eb&g!3wOw87{PXv zg3h3O7gyI^=+EU7Ly?xel|>VCN-(X8#3W(X`$WO+e&fwsHxC?tXZ^#k>^Wu_t?wjXad z-w4Jc7Y_=Gd%{*3aGT3Y`-7b2^(~?|MzUEbg_m{~!*?3@H>n(Al078)>rWDXWsuUB zKQi$@Rh|m4a5)kKb6&%lEP5w^1Qn=U?ZIR6vpcU z!YYF-SD1e-5E8gr{D^Tmd~>z%hc z6cvPQ(z1&6XJ72?@tuQUQD_wdq)X$SU8qfaqD4_&$uro8lxNAZv%!%P9(IWOD};A* zMVx`XFJz(=mj8msG}b{*U~DWLBEv#Qh)K2?acWTR7ytI;4{A&++FS=IsFIz=8zT~b z4q4qNDtbxlN_%0M-({L1kj2$tT5bUWJ$637w-xJXZ>uiyt?dwjE>A+|BUX3YRNazAWho&qNlmHxuI6vx?ZAwMqx^Byjv;kJk*zBdPL^HsLY{&%kpa z!Y!)=>S}Z+{#IWGVh}A2fCF|Lbg>kd!d(R@8zhQ*0x3&gM_)l`)p5`7!A-aZY9$X@ z%qG6=$p^R89{=<*1SXWuA|5lttt}ChG{Np0NC6`_S4hsXm+X`B{ZGd=8v~|!Hg_S! 
zkW%rjT0>$VB`eW_4@@ZwQ!>MpaOM9E?>2A-=`Tmn%|(caQsu#n4g~!KoX85lkQM(F zeKqY5=Wk7Qy${YG-vxh{7q_(c`=QvQKxEd`sNGAqi4^CZVAtUUkp!QCAh<|}l_-hI zsRenf^fO3|cE}}ZHriie`VpsKWaQk zFanBHi<8`d3tu>Sb~t&yi6CUnVUkun_^^xGt68W3YP#snuY#k9(UK+YJpP<@6Vh&y z`pzWzq}gue0B2rX-H&it7*|pQ(BVDlnMBC1ZAA4V3urnG`y|KFLM|zMljIa@L>fhcy9&{p zV?7g?((CXu2z(iC%BMh8QSS@LSdmoGlSjRUA8S+kkDZ_QqqDo9(izU(z0|TP?~ww8*Y9f;{}|WfFnvL$az!1?<93+R)+^3V=obm zXDAg);CMde161nK?y@9NFr@s7hmbG}E}%+2`P7Z;KVg)v8z4@Cn-5qIGsw5USE9}w zC7;I0IC#}YhUkqds26QWQD9(gkOZwV3_0lFPRL85%we}rm9K8OL+PWZo&hqDVKoM} z1MwZ)h-;G|H5>ta75(d+KqLDEdps&bW>6+2I8Jfvi+51?UR%6Lt;d@>J*q=x-LTut`3Z(DqV&=_*iaI zhD+gap4}i4&WcS6?wu>1A)}=by0wUU1$-(18;gv>!6V+11@ilk7`1Pe`d}|0UkaXhbtshd>5p1q#}`BgeaHMkf02rR4qL81mxNqCLjfh& z0SrM^VaLU%1`LqceWIC%hF!8pDrt)yNJm_$_CKUcAv~h!1aYr}QeJ%aGB|Y1N$FHj zHR8i3+;G^j0wI#KU}=$fR`D5W`lh2^GU?&cbh{&#UOkOhQlj1 z6BRHWLwW{zf|uhl31xG#{=_2gh>eQCg`=~AlL;5^cD;^!qEQEVVjQxLko*By$&p*H zsFwoMyu^plFDz@(XOZE;HNZ9oP$PK}suS8kKIhLqb@D)lpyfCjAU7kBTgLG&ClM21V~Dx4 zaRLq$nb1cDPTF}Y34$dR0tPCA!~)bx>OWd^dc0tiW~bQHZ-dN^-9~ZbxK?f9Pl(Fc zJqaCv8qItav>PWuR;<5+K4BOp|Nf6eHH*xB`y?>B$W2J_7abLS6&y!;=J>uDr^ilu zsgVxYDcA{pJ{qAd%H7~Eic7%_Ds4on!-1u3xIt+GHT-6H41NHcF^GXAAOm=Qz<&s2 zZ2q)W9sh5Qf6n9u1QiYj95aGifKMj9k$ScjusfOv1n0! zgh$O=kE zK{Esd7)2!v*JCy-Gn)Mq$eU@#uMoNl3l2;q{vL>yFvp!|J|tpo;z3&8 z|NcBq2>c|HdrAvZ+u_7*X8DkRC+(QXsSqiy^~B#l{4)XP*Kz6k zMKAILO7x&Es7{<6gGXM&BjKy7)$=HhC@cr30_LK&ajdeR5|Lu~;^`34PyDGn5tCOl z{*h1r&%G3~ApbIXzC3x6-zlrx+}zvyb4=rg$1AP}`sm}C?W#%%&x&J`UT+IiMv4cg zFY>4XmE)#LhAv<7D1fcPAxzi10K?vJr9JDK6SqqKn052-u>^l=op)fcL}jCQ4n?B? 
zHddIhvFBVYvZE#ur(26;tlEV&|K5I`utCynI38h;7)7ykEU_Y8Pe5`o=B=T&q#u=c zU_+#FH-Dr{v7DO{>7K4hawe9h&?nE%2?K1HZ=mO0RBZ1`1bm1^J#kkBV*|9)G>- zr}-pa8ab43+xxNH*!<+DL$_hIU`1WOu2H;oXq5cn`TW-1`d8GvBsAU&(PIbEwD#= ze<)yu$z$%_N=UAC6rLtc93|@heJa^qbGMj;*PQ&vPtyrxg|+7t^gVyu575XTjG379 zs`>RqhgNxjj^XRavFWDBGb+eNd}^HYvq@ChTY*+)N4pWn-L@Q(xvip5Qj!4WRSLhY z2=JgT2%O9F-&3>>71Ezk^HorBdSnBGa`#MIf;@EJ2M>XrWD}bi07v9eFOc%c$m)(Z zPFBi0Z7X-f#}aAy5Rpe+tG8_UCq(6R|CaIJOBJ9r^3BWtV~CT83IWF(R;YFes~88! z5fLiqPjhZ-Y>a5VIz%z?fD~)rZ2!OfS0DO6ODIJJj<*3;h`hoFYEJ>w3rX1T{~4_< z|G%GxaUy!8jhjal6ui;kwBNI!w4osI?Z&Bat60GV0?$qswIa{9fgXTCMT9W_@0)fc z)KVWU{|Wc#!96h34r$=Xco{?n0X z)A4D*YT8JzvR&325U>R6(8x$`gMVlbyf|^?3BYc8**T@JAb?td%CGd&gf9mxP&xn@# z>q1PRXX^c1YMaOdT*w2P$Yby}O*BC=QY_h2YNYN8!C-JO7;C$2S+9QMm9rWWM5qD&kE> zBip<61*Z7t=8z4QAyasd1?{~8Hx7(7_=OA;61eS6ufp**qG%!BYexO&4z8Vj(YrzGMO;1K4-eOk;0>BW7&q8_~@gScQ)~!6T^>3VsegA?xP@^*490Xy87(Cf} z3MjljU5lghbXYdgWyJeCTS0;IIoIRh(8Rw&6;T`jV<48~cbQETz79Y$4Kz3m`4FL1 zi9@Ky9xD0IpWLQ>ndvNO<%4+lUH-o$!9_>g%|)uMrvbY$;MgrcQ5A%@nXQ6kg9?bf z9*j)EC7+=H!;{`E&YQKHutA*WlpO=)b$O(`&C4G2!1|vGs0~AEO&~+1`Yp2VB~Ea? 
zCBSWzcf8Me$*_x37&NR8<;zSFVkh7KCrXimf|aj(&!F`O#9*M-e+x)=BF?-8v?nnm zszQQ{#72bvgGLj+xC!oR!>RIEWg&yOf)p7+_S~^EB2M0_H)VAiaGoRQlSHBx2zjOl zr~W5uE08WiqPEyTlW<_14=05B7*3Z%W?F!EBN9kJ6d3~@E|{VGnVbrh_yUUkp~DQ= z3!n4eHb5R?I1)sDTz~}wycOXB8G#ek&cKfFosei2CFVtoL^Uu_`Z0(t-})L*mJCPP zOVgfRuB4pzLZRyfw9tysbGg2e?ho6dqeuORuMqui_;|`I00v@n!OY;o;^#?7@EE=W zy9)USj1WK-j&xm^#@~Z52;ml(f9-!kUNEk{ha=4)_lTD5>HCn9R@dnZ`#yj*_oLuA z$a-N69zgTJHI|2oNH?FniY)vA1yB&~;2r}aa@wOU0}B7}seq0@kR)hB@B)PldAGZk zOoxjepi&xCNXBrQr%n}T0x;mr@}=!OmV|f}>0FWFoeInl?4Ad*^ADj2b)%vKOutSG zJe}s#z*7#12&~ZKWr8D%2f_~|#Q+NjqC9-${o;13u&F z2)crkQr+tuZrSzhKxUEEn;;8Bli^U;tO7?W2E4vP4pbtVjt60A{f8L&S{b%qneYd) zkOqwD>_HfpxR@xDKLk)K{@xSddMM!lHe&50WPp98ctO*zuo1OeYUh911ui+FabE_wj2~$qBGp|Vi!1>z&nQYb&CIheqSep8?mEk>+RRI=gCk7)yX>J+7c`w0e zoi0Mwe;t2f4j#UX15AnjRC&LH*Qh_}0LFsD{Gh_Tx4H*eYTKGLF5C{kjj%W4ZZOCi z!6q+Av@5`V{>z=L2M0#b-%0%6>R7-G;U5xQDd=ETGw35wh0>i7@KHpu;7_IC5qYDb zz76~+kZw-Ad(X_?HhGUgf2W=QutvrmJTYV;NmE`(0VUv|)Gsqj@OC4SNuwYl+yiV| z7AovqJ4;4cNj4DPKuo!;EcdGmW(@>Yvgv&R3<}KWJ0J4{9DPnOk(FdNOtRM`AGv&? 
zLZL}!VOY-lNX<0>tzm~?D<4OAZFjt37~r00#ErDhIihtIe+&dC#cN(>_6!*NSDVZ` z2OwSH1l#*CV;PRb(=x?*cbLfo7{(yXm_}>l$p7j9U5Rib*&rj~d7M_x17cL` zE8qMf1p=@x59sS&5a?3^q4T@~01k@8TWt3D3!VvhxZeCIWeXAJd)q$7_f zp!zWa4QJJp2ug7+{vhKl-q%Ni5rdo3@DQgI5&Vh&%}DcIVxa* z-}F4eX;qVz9HX>w{--B@gSv(0DHPA-Qko{dgBCo1)5kr!6Tu|T&g#3d0YnDm6^_-9 z6(QkE;t=S)q$a!h#O4j`_%nzy&#F8~fSi%|YC1gp47W;|&J*Pb&P_FVVgwRL&A+g4 zT2vvo4Mr-&9yqwr=sM8m$%HL1c3>Ox_lh(-SfRx+@p~}Roc#8%-+$N=S9K0P zWR@s|clkG6f^h2OqE!1G3OrVsonDwy4!1sX5Ptz;1-!Es?*gdfX{z1fo+gC!Ovxbl zCRAJ~Y$v5f3{pe}*H#{PU39(#khNPv!E6HD`9Ta`2M%H)aRQ=$UZtV3V0tdxM(BZ( z7rccKN~%FA1xq1Zai;8~2R3!a6CL7ieNIS&3BHSi-~)dlJp|K%C$G`46Ht<5m7Ig#tXsBu#`<(8CB0p2rL9kPS=#;U2K7097&32LlGV z6s@gKkG6bB&kSBeK=c42uW?{7@aK`(2o%9df+|!9BI9+~81OQH@s!cJM$dtEd@~2Ga2j{q_ z*fc#q7KBJ7S|>NWvpKMm2wgW2!AqFSe=;nJU28(`Hr9GR(d<{5s^gdC0RBcgQM|q- z0C)0I5T*D|2_Im`L{^FFTcRP-=?B_SQF)#OnIMVy!q6P+sE?K8k(hZDd_j<@-}a<1 zeha}ZadB}>y)}VyqE>qb#*NXG+11-#m;NK9b%jd+-Q7pYmOutcG&K^&=%cj{qvqwd z^-iBV@#Q+-EM673FL5{?JiG|XSJ@7V!cz7LJt*Tl4o zo}zfg^FEj|xp-%X@JP$O37@YXzwr4QEmCx@o059X&~Oqo!yIVWCtYM`L_R>D z(Xb=NFPM=^XA+7Z5)C<3SW~9rEh-0dMO76?^?81y{kZ44n?oB;8zeciy8ugZU7WAF-O7E>5jUik%JxoKMQ|Y zDV03~!=FD2kuYWoU(A^LI#e(uUF0r7v9pTlq7EL`NhER1L?fz#;>` za7>oXsGfv)7naO%eBCnXnEgT{zW7L_lAJM7PQ|v3ifg^1=uAULBWykq=l#FFHlkHP zSxf}|9@PSQ#Mk)o`Yd()7fbxhmyR%F^&Q!aga@W!PDjy+0$X23{33rpR@}dFCHx2y z*JbvYtp1o1atcd~RvaGm$vb1ac*zo4nEU7(?3W*9r-x@5<<+3V~A1-HT(e9GAvzl{KDboAs zJDJdSssxv$!VOe@lhR6cP3rojFuAvM80fbt==l5#GqP7#^=Dz85OYCm&~N2uY)tvuqT=Wfk-0$g-u5ZF?@Tp(-B8DZs&Ja`?=^ zsJ=Nte#azRic)#}wP|~NP266u?&?B8@j|?&)nAUB-wi8nVTLLnUA-3PtH}F}I}WvV zxA#B(_KyfZPjUzy)@(8ns^ozWbTWb|!ZRGsLQ`A+&0LAFir4eACU|8=Yz=uQ|Gv1MvZ96;x z(9H>NMZ@%o&7A!YLQ`}tJ~NxCE))m0yxA@xN-C8E3KTE?F3l4Tm=WcCC}o3QFRvg& z(j1b>uD>L%JOMW{YE9x4kZ%6F$X|G`dY|TwdbzbL&5$8B^lwG zI4#8oN$bnK?@SAx)^^uo5A(Nz=9RX#EelRgl)B?VPus?Ra1j6~W%ci6$XJ2U&W++3 z^jG);68;7?6R=oHc%T|$&G6x|M2wP|*@vRl6Ys@~eB@NGeKItq9Tu`Qo&0rjb9>Wf zIHzWIQZGf=6ts# 
zCVHyD5q^CIx>3$j&EL{bnA?h`9$SuKKFScE#pJyv4;H;r=E*&adv;z`MTPZgix%0H*B-NCskoKF(^^5VY^*S3){`5?_|zrJByjl zVtzw;f_P1r)-?1C(ZnOm$`7X44Fa*k<(4gUG{Z=V^yiC1XI=uZrGr@45mKSPa*ACk zNOF2VNLpZ-XT3fmusbSI#XdnuE~08QwS78egu2Fno}65w!q<}vGqO#k9{RL`VS-ZGA#d0?v0d3#Hrv-oP?&(A-AKPmp=*%x>M_#2y42CDILE*$DwEL zlzqzSp~j=@F6K0FRB1L3FHLd0y`V(|WrMsVW7X$k+V0Rp(bctijvhPCeD_vzCQDC0 zSL`<4xaBTU)t-h=)zN|jQ^nkKhrfR&UN_Qi+#fbPa(0xGBHS+;686T^tk{SuIW&u? zIf-9o5m3nBc0LXqn+o7e9j_Howxj$xp)CaSm@qv|{f6$WCUfKF8vzP2bNiuJ^maRG zb0J_R*G2-<{q2T9vpVXn-IBE1@_Zoiys#yFNt@)b8+0jL;P6wUiNKX7eP9yR6fviX z2UXJtiNE+&%{!{4daeq^NmS1MmGl#um=K;{wB1*t!rVjq$J&s1+$^|jv=?;Md3u?n zfVTUs(h^^H*`G@WKb-Y8w z>?Ju8Dp)u?<@wovRAa#QiAY|5{4c!DCh&NzA4D73=2p}j+mka#;LRdTZkW-q=J&Da zw{XYB(52C?ytiFZhWH94?hNMA*qY*(fioB0t4Tb+$-!j5u!^O$VHt;pYi7pTWPwbP z#+?v`D0xX~&0Ud=@$szvb}tiM@+6dZ+0`Yus8Pz1LfJU9>s!L&Z|AZcfr6;Zlw{rT zEt-PR@+RLRsTK@&rRZ?M%ySl9RS_${;vATet?yFWeYS5$LPfYhu+d%uyB{>{y-8Iw zKfJncDRlI;$t>se1C8uaN1}DU^IS}yw2z;6_fe~2r|k%$B%!U4HbR+ z#_%$u4Nddm0#R{7kZ)}oW31nIA9gkYGKZSDvtBaHyxhL|?>4{Sh2KJo>FnRxd)+Qv zs*e(P7#G;I+ahb*wNhWMejlrRkuR64&c2yUw>^`~8mcN9FZM=bhXSKD)>h+Htrl07 zC&d!quJsRC9I|gt^Pm+$49(U$$F3*85bzBS(0_+mJk-HTcX_xK44YXC`DjKF&Oet}2U7 z^jH%*+zhIk=^Gz!KRR}K1$@?3l96#->X*Q4d*FG{`*(9|n+jN3>5OHLxT&rksV!|A zJr@1Ss3#qQqSMIf7#YFon1*?r7VHxZVi-~~(>Le|IW4?&^mH*_9o^m~5IVZD9uvi3 z%3%BQct!8#-+@l4TBB#f<~PlCD5{az(S5S*cU|;oX;)VzzwF~7j@mWP>L`$@s8}24 z{ZF)7ha)y7Onh%+S)t>7!P1DgW<~ckn2hQ__ZDZjPR?m`4)?Sc z-Ww({@izivfM2HC>W#`dBf@}DV*Wdvm;BBXyUnBT`gg|l<#LTRoiOq4w?9|+wvT&s z#|XN9QCiHKhxB^ZBQX$}9)#}f6B-84hehPOG>ZcITWD{r2EX&jFYlbPgw{ywp5pb) z$CU>~M~NY=J~M>;%&$c7mtAMGv1l&@TfD?Etf=g6_?- zvc$ZIKl}`p20hzrTg{(qmtu`o5?nW6sQ;mKvU^}ZvI<+@}Q)C6{J z`RUX)SeH-PEQ7@s5>Ycz;6y#3Nxrk~&uPIaJsfG=1dXNq7Cp02)X8=iyFEqk9iN@` zE^Hy&(JBIW$T>bH*^6Xa^u^llaXuO!WNQI$y~y-DJ2gdzNaAbYR@FM)^ptXU3(;BBp`0N z!_seX-D|Ow!Me@7qjKp6*0ym<+> zHYQt4W5nFC`W#)w2};!`tp>m5n9QWj9F|C^wA{09DHr&1C1bidHFSM<(%;{GIBMpR z`QVS&&4%gMEe?ZgkG&Y0m?4_sg&a3r1A#2SV~T*a*{Y%K;NgpvPnq-RNX^K1cT0v-vw)+g%{U;F5J;3SK0S%YQfM 
zv9VC3|FeLisww*~W67~hs{x^xmdy7{;3yuywWotF0ub{FkQAT#Uc#JTJ6Q~vF4~zXGKwTd*q$Kyj;2j_$tijX2Wkk)1fCBO z?z^Z*MyPb^i+NU~aUnyc7IVdVXU)wHMWX(QNXl)@k*%16RvcXNSFanFGaS22BDA{6tO7wyGR6X(xz%9!6>|!+s*A z5P%C&KG6FtSOEkg2c*v5yCAb*S234cL0da)3M2adU59@4hqdIdk}p<_-)AqXY3Td) zSKTFSRHQJqkK5FUk@ni`s+f;r8;zHM?=WuE9bxjFYc(`@^lN9K2uU(ZYj6F$iuj`N zozM#sR=;?U4lE=vhQH|>Z4P4^*GJfb){puxlF$;q1O?>;EGdQHI-JH3#%iY&^)f&& z@%-fU9RGua5?|=5B~oEygsIs+y42jJSm0d$CRzj7xrQRB_ zD5#=^Fj-Dqr#u5ACL^$`8b(kYsBrTJfg&{hqC6qoCj3-XKGBLjJ+3H>UVcfU!VS}V z(OcT(RB(@{u2oXipC)enzYBkUZiv6kzV?~wdnJc|HH$XV zD0JWUpaX?fT(r|;@BSwAE#GF0-k6-?eRub?C4bqpEIlRM#U({R>h2v+5%?})9MXZw zQ_|)X0s(Pg#MmmhgeVm#UjF4(^6_Tr;b==KNcZK47QbtW#)Mh2Z9M7yb+Sjq^7m^z z^Rc`)1}sh=5+mIYB_Y1c1XKS~6qG|IuJ0^9XtL}Vc9+uQJP9<*=UMgl8iGDp*V&2p z|4n3@JWWkcKi~U)zL(V-+smrQsJAqb@9X-GVtpreg~g#~Wv{&m`;*O4O7MP&Fp03{ zz+YbR1a~JYjMR2_bzfYt^!0$)yL=`i&-C>ipOYV-Gy1@Psrdo|dPSa(V#Ce|lUKM) z9GZ4jSzB#$aWlkuvw2lu`=9r4bkOaVpen{hKeV0uD5U3fjmCK1LQtsK@rb;41STe{}q#TeJcW5KaBCxD?YrVL^a`~sfUit^<}OHr#2mSd{BT; zBB+GX!7L-U%Zrol6(mxRmkkcI1QPWRw`hy3M>sc55c?j{Z%YR5E-9%_#T=J6b~5Y z*B9#PI8W@kdVf0oqGDn1TuYh5^h?akI01Zv`-bMs}fF_w5fJ$0&NaX4H z{hrLgOtqA@(t)y0SlqUZvi2HHlr)lj(?q80UVcRX^0=qs@Y_u2;h|F1RLwB+REa$L zbB>T;it07~Zk-nl4i+mDiB%P5p*iW6V&FRu_nfMh%aL%D_0(^U;qMku1c(V(YGprk zz2a09TjhJ_d-mf3juAEoCx{(9E8_-VO#S}t!t<{2-b{fdmbI&OTwXiZEop#`Zn@OI ztsgjTBO`Rm@2b>ItN6vDEaTpWnd(meiuYgiZ@9|l!?$7BV7_qxEXjoBYvzPuxi z6xg-XWAvrod{OhgmBJm;ij(k%x6QrwVJ)XR#lzX`#ErTL`E4!{{;Rk7(Wl}aF6D?z zw83qGVu?E;!L2?^z1Ej*<5*?BoY3^%FaB}NF89_a}!)3EiCHK$`{&c zZLt|0ta^S%8Xil>i&+a}$CXVTNiG!SfI8OPPjJ{AjFUVz&z9VIiz>Ls-*|wXMvu?(0=j?fSv~m+ zzfBc<#2@6WxZYLY08`;#6&|V-eAYj*dR|3xXV?4ei|uQ7pzRifA83ymicH-&k0p$~ zNO%c$J&@nUb>GHOCc=zq)f?L8QXmD;ySKoN6;3++0hFux!}ppGJny0At|=tmoN`{) zzpA)BGcn=96V%W1nfrc2VMrDOp9Qdz3Vm_#VhMzW`353N{S|Hgo|NtV%#9S4$XAv; z;qFXo^D^=^2aRKDs{;Fwrw=rk7_>^~3HcoxM^V}WRm%fi%^_K3#2!5T6(+O$k3y&= zNOnEoG}w@ulR(6?cA9wkFeEb5>uge4QBds4Atyz}fJ)2X+Cjt0fLg4;gxX7He2!|q z-cujM3?M$9Z^=C3?Fk%h=j-gzVzq>cqJ 
zVh}3!=NlfRc!3M9h}{uNoCV+er!@f~%6LA)D3qd{xNyWs|N9oeRgpQ1rNY9SsrybY zgHv@ck0q0O*?d!WE<~Iy-BHYT!ykCc__-6jwAVUMiy#Dl-Q)S)vh<3gWnMGcvFOI6 zdhxXVoPdRf3+7t`N%QqXRhjQ{N)O~@iQztRgLJ~dAj19}CwItHZnCY|@y-EeExyV< zCSFhM1aXU193E#8uU?3gD}@E)fX`H_<~t+5g0Ta;3sMzZWp-P3 z^*vX}m{taYzmpCE=CE#%Vu#u!%{%z!-ZJ0AQRx3a1u=pV z5r90qG4fw~n8d2Co!*@;>dVWLjomX#)~w+tiVa!uIk9bgv?5E0d5Cd%c))dVG;LmP zyR=wmC+Z$YXb%CFiV1nY`4-eXaMd+GN5f7IjW=!vJHW~+y^NbpO+;@$))5j-0) z$*a!r{#;o1XKp1$+LF|;U9TXg$f~g$9C|#9(ZzOiu6=Gj5ueXzZ3kg|7rY6Jd`;nbe?k??%TW5vxNaT^ z)lb{y--2Ju9`M4G#Yo@@e$GIG=@Irfqk&Q=)FGH#7m@(-M5r~)y|?I#&^{v>@oh2V z(v4O=fi8is)&9itn95B&<(ku2O+@0Ge!kFi@Jt*C1NWOSkB9SsRs9YA4Ym4Q3$doJ z zKXV`PdeFg^&$$b8bDMGgQFPBx+}mr=yQ2QkUXS1 zhDfMl4AHY6z*ysK{|20T>u#eWO?(Wa69<6HH{V_GZ_vU0nmrNHrnQJb{dOk1TE{Rz zSyyC(6-N2oHFTiUg5wXj9Q1tu0sfQ=O}F1xgm+l_Oa0e~ zdleaLPKO6C7Nn%CLem?(%rLgMM5_{5blCwlCof|bIH>>Te6`lY5Yv0ePrbPS{}4L= zuL6)!=NmBe*}+JUK*tM`&6J*PmjJQ-Tv20t*i_Q7@49M%NcOy9qiwXzG=fHZlNVZ& zP8r;S&gLg^1jx+r`~VSPKT2{%9&HprCki~z8?M)XFXu@xq;=t1xPf5uUasK>zxo;T zIFT8o!VB!(5<*e~{5l>%ovYrXVYy1XwScS|Kf6Cq%~HRxP*^yD5#Wcu15`-)>FQ4S zhnxV^{F2oeX+whxwqqu;K%L{?; z&E=&K9|UP2_nU-{GpfFuZRNq_M#PP;V=9wwFTqF-pmcAAZcYWa)O2@ov{&eIqe8G+ z06g!4Tn3CjNFr$l!Et%tIaF9>h<=d;W|>;1j(XP-M!KV&1?b|HVMY;J>dCHimR;zO z>wZ_OmUj~SG|Pz33g(%N|DJlj;DI7}{STr$uHJK`j-t8(CWLzVlEORA!Q9?4Nkcnl z$%D8t*#*Ky&D)yJ@QQRw~qhcbN+SY}T64Z_-2OQPJ9uEjjW7erPBZ^K~epie9h z%KaLQ!~-JP;fdYV;D*4VGzsP+%0;rJyUtfO(>GK|B&k;mUyYtPh(XKycfiE zaa+W>;ip%CDaLNF`?+0NAHh#J$p(S;GU!GHY*roOpC>+r0nJMDo>q6^bbj(j=#49Y z*JIGpgC(^>V%ypOQ55?ri`D%9M-zZ$l1>12h?uU$e=* zX;|t_`w}xTHKut4MZ_?!{P-j&$R`86eL@2jD(@xRU=b~8nA@Psajo50$aL*!xEqsWH%CHnYt zV}x}xn8(Phg2p*trBWCg(J z)ar^+5HRSR`usr#Yy}oVi6_wIiZqBkLY@K1ClRU*MkhOV$5h;#H%biN&jIk;ulW8Y zm84DLIlr@WU0?w7dWG_K=Lj zcqhx!eyB=IBj9dARr9$>a+Rdx!>7NKk_yNqBQ%P!gb3%%O4fN2032b=TQsgGt7H2P zMWQ+N+E-Srl0kJD(_-Y+Fw37dEF+DY(}Ph(k>!MYzYN~>0VxQ`-Ci|+ya5se{EY&$ zLZbqoDFWlv-DD5OX;+7rNyzfNSwJ?CK#=*$X$v5y1FS%r4T4g!?5bK|uHh}}91mxp z8(X{lZ?=&}4gUQH7XSqhJGB|b*=2m?!u@SKHkv2Ieht#&`xP+NU!zJu$IponYF1N< 
zv2OeAzjj}AxuyyIa#Kyfi|ZMqob>1=;PBt_D4HfIR=(}5qE-|~0HiUrzh8bEB|L^f zQ(d460C*fu8h`p-2C*)QcDe4ZlnNkPQ7z8{R?H)4aE@0V{wVXGE>1xm=;4F%%%tFSDSHYmTHJJzR+_oPx9Pp^#eMxct z!7DT!YrgJTXulkVhvE&8HvR8PUluVI0A?L;g|H=l)K$B!E|;?0lnJBIGDt`w52!5~ z&pE1xFx|n|^VmiF%N~_Y323T>&JJVVt3^J6&fzm)7+*HEBaOBRH?(cptlwI*>oRRj z0O^zNTYCP>@CBzK-rE^YnG4s{{>YhbTglz1AJJ5a?Bd9yy>p76B6i6KMyWy(re8|e zXP|~xs{Z3yihi+AvH@&jLr6E1yC}ek67KS}H5-ThkI2?has5^v-kDezn6ygZ1|*Du zhNShouLXb`#b2q6-)uFGC8%SlehY>csm0F&eat@9#>2>0?1U+ktoWSdZgPjTM}YsN zvJlS5_V#Ri4wUYFIG!RMdg1?3fkpb39CSB10?7Vv$T`rp2vyvMHJ)R{M9pfig>k2M zy;bl>*sb`CJ!|k5F(9#_2;&YPw%DGI9-^qK0n>8Ed%-?}=@00{TW z;n9ydJ6mZRPEI0I)K|C&nGOH(;QX4sk++0{E^`zr3^2*NzReEM2D?0JWHTiTqfANy z(kIckFKr$WxoCm=h6s<9{9>fmAVg`kO%uVw5MBK0Y_;T~iO>xL9 zM;cj?%^7(Z2WvH8-8clF>;-o6%#e@)&>LPmy(_bv`$5Y9XrO?`O)Cm+RymC=l(aE1*;F z;RVuH#%l%K{;hi0o|0W%2qabTO4rgJ(zeVM1bhGUOvCI)Go*R5}xij$OHZ!iAe)8k= z5bxw;WGR4jd7XMPrYoK}tx~Y*NmRF+=?WTU;>C+RIiSXXWzMe<5_txoJWxj8{^v4? zrFPd(wGmfAe1;I+8Bl=FFv&A=$^tyCr@hn6^K~y5mI)0|N2Re_BWG>|&Osx6OaFZW zGw4s1F!<*#uE|O85#kZtzH|ge#f}#Tv+duVo9baWT&NZQz4vmTaiP!99Hq#{Hh2t# z-?`>FD|*fmQVv5PXa(F%RP&%FNmQyTg|h`$sCv*iOpzlD3Fa_jPO!m)2SB&TLFtIoMK74os9GES9_j3iSL27Zb}K1LJM8*ZB=s=XdYh;1gW%iHJ+| z@JG*oK4ZZ81LIry;A7-wwO}hdAZ1FV0f4ty>@y{?SUY3aGoUT!Hrlq`%9c11{PaPkTuc{+a`1 zr(+1k^h${l#klalag|^2a+#P2u5dlr1;(f3o_^7`O=p%}Xr^oOhpTo3d1K50+MMjI zq%=o(S(tid7N%t`RPcHf6VX9fPgMuaO2xLi2GI1c!N1Qq00j3odMha{=g%9r^YPqh z5(QZ?2jYW!^u-#D5ZDV0P3Y^r>oX8&^CCmYh-}aJ^FMjXP<2%0- z0kK7+0SXq0MR3M~LGlEd_s-e*AM^^70Ym(}K1k;te5Q>+iT7|e`1CDB{#JkW zow;TH%TE396%#v{2k`+0m2rar$P*)qRe;7^aa zt4_!{)_5L96nf$rz$_I(M{^=Vn=KmTg>ZikUFL~Oeex#j!zq8tT^pFW2S{n07U<7a zyd5xo;}vd9wI@{kIfeGAIwLCyN9z3DpnR*#^u&DvPLSc3x#{jnVulMEv->AqA3~n< z0pa-r;3^~tb-}y5V&~W3E#d!G{pT{U1>f@3ptZdrhvLyG>3ca3?yolJWa&pt)UG)~ zQl{6fT)g*XmuK|jK;rvna9VZ$E(`}+fn%kqvstm@7qji}CaTHtpAj69(KjQfDP z1k7|K%ZH=4`UT{M)q{Ah55K8tYPF(q)dN533#T~-479OioJ99xay!mK?`dORM(V_t z`yKb6e<9i6LWVz1__i=1{FaUBTk+W_lcl-R#8j;QT@D~}rQbzJGSz{R{jfVBhY2CG 
zRX8aGv1$m*7Q2XVO$L9hRL^bqMNWJ!oG!(a(07rm2m>qkrR>vMBi>C0cI6@BBly9{XU)iK{xZrFQM9Gr;t$IW=Pvem8AB*J^0H*0U2X7&_gJH7l>0Ei{V37^xxgpurrYnFT z4$np!cVSZ=n*II0Rc?%(A!8GdGOCJ9xn?`AUA*e+7<1H3CwJ1hjE?m&h(AJ*9DCe9 z?cc6`Yh;D%Six|pf+t;gY#Qbwc(<-SCQN-EuJ)Kv#i>0lS3NN8V#ES_T-gh&WPK?C z=Nt(>o}i|IhbBpD-P=ji1qzKy=gDhJd2A(i5?2m=H!)#O68%hL+KV-d4UJWK4I;mc z)mba|mz=hU@q#f&S9MN4&AYd8-SmVM0Z)E~hI_sE#ZQzTTZ!Dw<#N6_sGf0;twc2`o0(gLE}zA5oKax98B$;+=PD|oUOqG{Z_3_q zr&(kEsc+CuAv>L%6tG^g=ASV?l)~S&Xqe^0J=%9G3pG)3O^rn@IujS)+cHwL!-U%u zlj^CGZ>Zy~hD}=Pc$dOVjLux(P&P=QHf1fMp0b6dU z-mIb$Ru3tx6uga<=ybHZ&tGh+f#Vs@4++l`M6Npo6Zn931FD3}LOAw^DbL+i+J30u zeM&^};v6al$ga+B{<^ae9zC9TpUl3*?OS-NE=rT0xESb4C8QxU!Vz3(*l!Nvz2w!G zTJ-G7v=!!g{S@w$^d=M0g_xtRSIc6!Gaz?MM1iFFAPA|J-{$#VJh5~-{LzIqKRFT_ zU;{qx1N5Ha&ccV>@s|hU&!@ghP#BS90AfP)baPV}+u<{LZ8AC!_zd&Lh3Cv1+@j=_ z_wGHmUsBa>f4ccQ*TVTbC0w`-5$LMsQ`phT>a<;Y_0F?X^QFNO&$uPb`bf;S_KECM zX;5}j-U9gqm+`lORNJi-n;ex}`J?1!=m&<~oaIxf)GLA-D)dEdM#m=j_IwWwtF2i( zYRQN>2@X{WbH-iF#*|v%%K$n12#^{6rnM$hxToJ2C1rYk+#MfGzRXD`0$cLc0R~@2 z%O=jNBRCM6ZK2e%|A_?p$o0wA>_SvcFQj;=Klzhx; zMY5p?IuP;FUL$^U)AspC>`B&oKAI6jwbND0d^mp*60?ncBHnZw)VJJ^hT<dM0Pxv4`w@a_-rjdq~j|bvZAh^+B02whHt)i zq<2JD3b^%li0 z5x}g^l{RCU<*J<%;eM~bKkYrVLOF?iM?0ioP6KZO%)1l;i5cX!;)o1dWy$8C#J=olVGQA(g88n9lkt|hXWcVsv;oCuLx)F>B~7y z3+=x@q%u`#EPP6(uiGOT5AdkrR6vd^g8yC`Rx;G2RCTgC^E~(gPI?;Mu&{UoJ~IWM zA!+I-$e!gxHCOsrC91PLDSO|b@Z~_tm~EeS7!tuJSwyP1SAFJ(E_mfjpRvpljs=$% z!t-*Qg9_>!nyBG`8{i{h#QmVILp`g~yao5M?FG-YUhh&qAMR1XX@C$@frT(9J7AuA z#`Z;3WF;lyZL#stgmV)wXw#ki5Sv<%VD;eakl2s7t@l>Gk6D%+_%tbYd0k;I|EyIy zSS#^7&;*|6)7V(2FH!05dr*%vVD3EKHYgn#aUrA&{(h$bvaJEg7X6AU`;mhSZU}RX~ zD+20hYxb(K)=yI(bj0SF@CKj-b`X4qTf%|f4$@2Oix)=88g~orcZhSS+SDD;?LMxY zjBbRDTxiiN985aXDJ$;8d57JuJ1=FKZ_I|;RjpL|T{E(bm-mcY>hy>MB{)!QDrLrc zpoyJWBjSD^|KMD0vd-&Zb|3w71MA=hSi6E-Sskae*9NNJp~t8)N`UcJ2s~2g7to=( z0*-?z*TiUMVrhvG&R^QU9mN+`bMBH~ zvw*o#i3hdLKg~zfjf$7itc6Unpwzydt~7K|E6}(K0?R_QLtK#FhtsUQ*2Q@qB-U!^ z9zFylHd&iYQysQ-Wv+cE0Az^HaNNUsvau*zu?oHfX$qo;L)cSv5yQ-E@r(vbB|nKo 
z2+gE2c??%AZ~#~VdPGhua{4w3Kwrw;XfHQMsgODg#{TNS ziHXrP4T*W^@D;k^PH495EUK z5C>@^RptRKbNuypZtjT#pTsHyT4U81cdvmxC;)r#>po0!8W_iv1kh~aY#@@k`|^Jd zrQ_q{;1VJwhb^NYtxZ92Ge~AxY)^^yq zo=6aYmuS{k8S0btXfDdi?|j?5u?+r=8SDaraTJ~x0Rb9a3t82@AKwEWyOk%IY~}tY zQ&r=q_)Ii&f6J&%J4`bP_QiUMSgak62y0{gd-s!=%QKta;Wh_9aj-Ejz|pxoz!42; z#FQL4;Lly^p538u{dmi6NUM0650(0yy!ykSdCe5|>E>sAq3fyaj|P+u*+CtrRfN;S zk{^$WU*_oy@Z*6`DZ#RAypd(4!m`|2TrAFmFj80<7ZpT{{L)dU>qN^3JAzl(Nu`nC z1fwMxq(sC7$88f?+PwI&dH=4+=lWK&dyn3~a7lyt;bwT!L{o!P&^y6-;(S`46^`1; zhIEZFEf$4~aGFy#JvPs6lyJgm=YWS1Avz+^mdsQ|4{v>N0wvK?&-y7E^ufZ}p$2v+ zA%X1hI_%H}A5dP69{WGMAR%r~q=iTIZjq-PdT#j2t(|0i)4zL(zf$(t8|>h@H5vxGodx0cBiN-jsTFmS^NGVdzJd|U zJ|RMc(Y9yRTbqBwPA9&8vB?8$vD)xunmmXm&>KQPR2H2!wK$(hZBo|1SPTCt5d!I@ z&C}p#=wlSIJq{b+jj^+OGs>jNQ$@_>vSm~W_xtLj;ilW1pgMNDN+Z(SU+rJ6I z`4tP?zO_r?w61p&!pUH7^909~ew>**!nlzSQ_?%aE4lKcSe#{9a6cTB5(JMbZGhtv zFQotNjX9CRi5}_91&d0tCl{BMZr@ay9wGF5J!rnyUZ$V{E*!{%@w<=n=;^-we~yOC zmN**;@*ybjX~a;5;LxehvS{-O379%6&)*A6`kl+jQai)@K}z)*+i=3 zdORq#HU98Tyyc-vMt;Jo5mog$W42!q_M$q06%^eKZglC?%>9ZMY*PK&ixlBy$@6eh z_7^sVW%_BC64(@&TATWVFXHY5ym9@5&e^tVpyK1(J# zY`XDr{8}z}92~G-sFZ`TX<)48lI^pJ)%|6x$0}X;r!qb~y7q67Ba*{xA6bMHd?IR~ z@?+3g>pX$NU31Zm+L)XSUp{@Y>2JvU7lDWy>_kjp6qrC>!X2W5)an3QInWew@U&z1 z$To5Fiip>u0tN#THN%K-anXLN-{U>63cJ0ClHtj!ml^gbk^PH z5&_{pS!`o=Q^g>T`rx_yjwsiIiP;$fLme`Dx{9nUHoM+;dCBOv{~~?E&W6(PJQ}#I zD={UCwAGZ=Xx356Z#Kde-$#G=#1}1<=CV)yIiclirW-#9wY8b`pr-S;5r? 
zQNl)&aStxjY3jh4oCrE&UeC76z439gPM49)F;!Ev=9>>IohET>JhSdvbzEos6nH_6 z$cS6gw4&q{)}N}A(dogz;UhBbf3Ki$`(QXap2A0ncQ3dedMgA_Mg2(}R6X)ms0+`F zC%IVSVY*~pEvEQXU&Y}1SIgf`mWS^cVJYHNIMj?g%hMnBzFxVB?>dIeF{A&vj#C<&!uerKxDkc8=e<&M-) z1N}ykK~3XWXoLIo#A?f0VrfDrc_$jR++J?YK7Gipj#Y{xuK#%|KHjl2ZWH$LxIJL^ z4g(%S5lnm5RTYUC3VL8n1{hW%s z8=s>1Mbb5YJ|>qcxzqep#;oAMA!fJ?9kMDExj8TtkQ1w~q{6-N0_=sTw5_8luc@GQdVk(-j^Ln8uwF4kC)7|<@*j7N^2`5h@mHKK za~5NvEPUJ=`M4#-s@c}c1CJif`UQ~f z6%{~j3N_Y#H%zYhw>a}QI4{+<)aJFt7QKKZRhBvyV43ay6szV|J#B{F-HM98)8D zzmGOWYWSuxPPZ0v?1-NmzlVuL2iHD-O4)eKa2`n0Z8~Zo#D4|{C>J>CyC&7n@u$8|x$8Aa%Bxma_GssM{#-q}a$`)Ox zbY<_*PnJT5dgTQTX8sgR40**;%W-REnGj`nq}O+v9hNev+;`|AUtO@}6czN4LkC~kg=KroGqCOZc4x4|7O~uBoK4ps+7fd(nSW*f={kGFP z+rDPws# zuI37p4})_!>-FgbyDm%b8zZ+F7A0rQYn$ic&G~olDsj|Q)?38=&Yd}4$PJXp)KnQt z0EJ^sNcU^(T}Q3==~9S5d{;i47g3`v)VFY$^)kbImFk!K9hDq$&($A`TstL}&H2(J zMULn4&ArGa&kgl}T@dI0(2ix)Bq5W_wEL9d!|t(0oyg%u)R)12<#@+=+IKZRuPtG9 zV5NNKmN>V&@}Ve~KV&v^ngF(>V$)OC5Jd8s*jx&Jcfw6w=Ub8ClIy3zVSa%gLW{M~ znN~+%zMG0^v5L0)Q?qoW%O4`f50a! 
zFxecFij+`DzlX9LY8gOoRneA^3d|4}lhnT#>U_Vmotjx&OMc+NAst|E~~I4DwQO8M`vZG=ZSD2DgyRjRni>&PU3EXX1Y5e8HnPeF`?2fnG{Wv@Xu4zFu}cgY zU$$i^tnTE0(~#i#k(V0$tZO!l)3dRLDChvkzo{j=C2_rN-oEJ>9FZc_zR(`uWfH>C zrjV;C;!Q0IpkR83)GJ2hj%=!il$uk?OCB2g10yrH_=nkX-A7ESx4;ra*8Shju2l|eFA{@hXU373yQ*BuHTOx+UdwwfMWY89m$<~NkMXAY<3pD*w=uTcfI zgGhj>)V^RgaaZtrDEn`xD%o54{RwaWbY78>SGx33-W&Po;X)=fS;cdE5|(!d&v4LZfa;1;v3zw?sV(zK7 z993KUXN(7M@jg&|oL;&tE79l9t2@^r_$K9H+t}Ai9icO=v)HnpzX4s12M=f-&M+D4 zE`1f1Dm}x)9C2!S*>dQBWS)^F?$C{A0!x^DgCza_1qeAMAmpTDHG<2`gj`|L5ff#j zm^?f;DY^Fptx zgC^(5jgw$Rxc-vcn09RAWJBDMP#HC`0XHou)@RvywOy=nP|R+hxmF?)KfKzJ_>z zb)3ylR(p@4b<$s2`5?=c&W(3)R7K|rjlGXlGd91RVZLsvp}YBb(A@QTp3_t{XHZ>4 zLfgKu#Y(xqS?k9UV|f2E~LNn5H$EvSpmvhwLc_f)H+pjSF&s@39lLnn!F;0vkpDKK}; z-By=dpVTcW8`1iA%wjF651tw)UAS&TmBgLYd>Q0o*|XyL+?BrKJ?7!Ki=1AUX9uif zRsQD3VArRgEV!Pc-r>ckg4*~P^92UKmXc}$Gvvq`L7=?_filCz9``E zVGOHZr}%X~`PA}CuFoF<&$?gqy^c2rN2e9HYSx9Wl2%Q9Ejzu(!;`W7MAT#Ji{h{I zI-HwsO^0_f;qMGfefHz=>O5N$iP7$q*G!n4e!%07N_kW_mVmBP7sNIYs)`jsXRp~| z;_|A7aDGX`BCMD~yY<`BY{pe(&mOSdrrrNzSio3{9kj7~#$;%k5;C!NR&=yeP|&li zwkE7SYg${Doo#g8y6)NIDvx`A4;fp>A+ED2cm7q7=zb8Bo#VZzJ#ukFv{K;^eLKs? z{ktLT#`Wuhk=R;CF5ex`mTUSFvgtPKIp{_k3N4ZP4BX4GO`F5xf%lHQo>FH-t!Qqh z7&Ffni+H~Ejyd2rbX{1W_4mtDjj|D9+r4+Abh#Gs37+Lnhgaz7Lh2ft6y|s**)1_q zd0b)5DCP>e?Tf>^<4d}&3)nh;W%Yv;^RVW*g2sozVVr@E-&fo{GT7a|GA{OKbu9Dt z<~aGDtEnk|VZyWinJc#3_BL77uDR7(=*YaE2f59dIL&%M7Waqj*2})h3KH%lFAe2( zZqM*|eNr*oYkun#fKhSLq9jNfW>u0-W_jmJ}!MdHpBOmc9KX&54d3K>D373 zIO_>%j?69}?W z2Iv}@3IFKCwJYqXUT|hss8nQXRq4kJv08- zYl~s)f0u_i^fu6!+wf$d+7d^26uzv0GwZ`ROGmM2bEaLC*b1lu$2eAGHuGrx^-d{P zpS+tnnaY1^JM&K6_Tqi%`}e-Re^$G^`h!<#rg!K`g3o#lrE#6buL*S$c_DDu_p1^Z zVzaWONjg`jzhr#fJ1OUxz07Ad@KfgI2JTqq-F;)3qW&stanFW*iQ1cWZDkE?sSkb? 
zGWKUMDQ&jR`u+V)degDy*KfI1e{2AX(oO1e zUkTQNs}qEGH99B*q4-}6nvc<%!rSWzvAIm9`cU)ZIrdmZKLL?~3-8|AJyku}oe>nh z9XhdamOf^1jAL=Oea7m`xSiSUnyAWHFDicLW!niz(SMr?nTh#0Y!fA2cz0;`aZSsXe;c0D-7~$=YAYEWoMUal z(}~}>*}ZJj$&;+F;3zFf>{Yt9XHV<>cJacS?|1K#o^_c+`=;%nzt<*8Z3(&Fy-|7> zsZpbz8#<|=Tfx0I5}MLJru|ccQ=}0ax891A*$CoE_$9m8ao6~!A9I9i-D%cIUuzTQ zS=U6FQE89gE?G;*{!9Az54+zy&YF_A9$WXjCC%aU`CU9F%Qs*~n4g$d(>(Bm;NZ+P zqTZ7(b8|_g&$!D2n=>xTRF88H4L28$nlHqs9b0{Cvq`}f5rQp4(0dsGoK5S+n8YQ z&$LBV9i_1Z?r2#Jo}?*o%>EX{YV2&=ZZ*%WhmS51wm*_%s-~n2JMz1>RQYx_;5W6^ zAfHcy*0#sxP9mifir2tgBXunsmvJj@6n`NIl9X88g_Av!nd+e~n~yY2AMDL$xNc!{ z#M6KN6q7mL_Q?wLX;0zw+E6U277H`f<+C2F)6585oCv%?Y7|Sw`RDJO3A05AiDGxR z20r!&&PV9<2na6D{bt5vF<;-k&kR&5ivTmCn7?kVz^Hzke|?vQ;ZrTxb8V>&<-rPj znHA1mmio7E*$?%(Ahd09mkn3VH6~WR&7o^wx0@mxkz2g2ui(L4GIL~a6~8ec2GZ6d zzR~~Kq+)FQK=OQ}yk;@q#vUlX9-<&)Tu3Zj4go~TL;qmI;6`HJ)IRR8r0k_Re-@F& z$GHXVN){80EDNq{WHZVhTB0j6XQp$W*UzN%b-GEs;0rUzP@ zqT1KSHAUdk_c7tRO81@c?szY=V?D38aDy`6u(#g5+a4N9@@>{l-l}iY=Qw9t>G@LA z-N}EO|I?k!9;F>W=EhNm87oKw(-*?Is_sbnlWK{+L3_=iv;OY&Do?nsIwan4sq6d9 zJ*}+kwmD&*o8TmIhj-(}YYW2xh=}5e|LOc&d~Tiz#yT;np*f&RTD%=D$7X-Hcs# zhxmKaFUCviW%l+gtHO0r9r{R}0Y2*coVz=ux^l(Qwp#fuT-n~@^f69iW%}A6&mQP49|O_7HCVgt5+ggdS;i^_vor- zN%3Saoa5Z9<^n5P8eI-q%k%AP^F4<4!$ZtUS4T;IP3gue6eTt@C%d)`@v}GBbPdQ4 z5XgIZm3er{#3@xgp-vw+t|)%;h)5>uHw&RlGIymjEl2a;s=p%xljHJ}!`j2qz1MH0 z>PoKddGIe!TropmFsjyAWSeBJ_>t9E_Hcnhw|aRvQON8=zi0Z#R5VRNf-1|ME3;dy zb@Rs;rsTaWOqhA1lJPygJVUuAnXEt7FWtCNTG)+<)a(FtXn5W;uwx|<-C*##7CFlb zIKQL3pGqJnld_ROz;#28yzu>ni_}=lZ8@xSPOb$1N}C((v1Du$eSueo%>(8x1idz zx5b_iyX4;O&eqaDz90Fw-7~tp!+e!S8=TbiOEp3Kj) zH@^r_YH<+c^7v3TbfqIdNA_?$3Gq$*au|wwroB zZGREB{-9;wWZ;+VPvajP%0gp~;KqdB)ZscQ$?Qm5tz2krI8Nh#xMF;MwErmz+)zx= z=Mf*FQz~4fMl@7S3!dQ7Z^6p^4T|&i50(kW%t^|(mVUg!RhH75m1MU4ZtLgoCE|+P z>QcNmV{`Of`7EuC)61vN^!;!(ma5tUDtH${?!d{<)`nNvB)>M-%AnWGd*Yib=d4qP zx1$3H(6&zAc@YhSK@J3sI8ZO!zt&%!*k)ZcgvzBolb^NT(L@L99`; z4cl-`cOk-dAv7XP+e${!cxE$HgavX*l|Br88Fe+~LaxL2k72R&BsWW3EdLf4E1SuD zsR=I)a_(WjEGO|w-kzd&kFt#?xx>Nv;Snq1Dv}IVdPGUK;&+dl_zJN{ 
zv%U8l$po&)hHIQhvR2mIL31B6SLV_$SY}PC?#N{RCnWbp8Xd$522<)sb#D4~4n&pw z%zOEM=WJZ!e&n(hfBLlIOFKqSCXtuzFPk&b@3*AZXE)bBmoG=OpY-x-4GbVlP9+=+ zaHiHWl-~FP32BMI*Uv7P=x_}bQrnIm`d;EkBg^Xj5zEY4`%xxQwAPibRaH(US3HA; zGJSWOBdpo#&pz|d;~mlj@piOg_yZQouMIPX_tds6Hgf~5WoZv@6s|tm7}j zMGx_c`||SNxG39~Rv2}%#Wy0x&nBsT2oi)S_}CHN(lk);#1kJq{Jmt7D1&^`784ac z1!}@9nnKkQxA&gViLIa+#UAT^%*__GRae!O8`tXeV(V{lVcjdEf$MTzNKx zhfQp-r1o)%N%J0`%MF5F5XOZ#cPu({IC(>lSwD``O*?;}%c}CJ-kec>)k6P^i}q@t zxQuLyK6tHL^(w7f+J4KtbYSmW%EziOt7b4h+|&s_1Pv=7IJEmZ@Sgf+(EP3p5*c47 zT)y}mMcGP95!XFad%9{uQ;Ip;#DcKQ;eccLJ=-ml+OjXkZG*1#mWmT8t(@%9=5H$M zcP~EX@cFjAi^}~nGxqTV**cQs3w5KayssQQPXBtkDy7rgk)W@TLfTmd2?gu^UE(Oz zIwW1COMBrGQ~48q^9gryrO8;)Wm9~|V!p3MZGdAUa4C`2e4vW0NbJ>q{&2E{$)kGg zmfx1_O)*sNXpO%-$88znftjhA{(+mCa?%^ea9v#RGt0fCbe%}U1zHd(=Mm}>Wyxf! zhKh>zw?3*#aIMQFEOSo3Q(L?IGXnVqxl_YCcbKJZSM3NVTp4u6A3Su=FD8`J-ue#F`>6; z9^$ieh-50dBSRIvT%R0GnzWjFZ~Ha44oCQvEV38Z{L+w|4W=JA{S_Do{*WL8HX4+t94=05AP~ z0LwYn*jF527U0jql5w$=9&JbamXSX-qhF@c{k&ddY0M)(7SKcOti<-7WtZghC2uEN z^WSo4_OGG4oIEAO$t%=CxmSg3)CcK$R=vMq8RO5%J$X{??(vaSKB?}ly}L#uiQ)|= zr=fCAx=svoe~w^diQ(t5c$X$W@Kj3s&Ef%v?a7_}9&X?@!@A-fq)3?=nXBd2qidA2 zXGR9wp+3go>nZ6d^RC>^-*HHiWvJKG^ZbL64%c@}z4@;KWf=CQ-WVGw>;2};IrQK1 zdSIn)y~$&drfg-vxN8ND(hj*-Z6pRZff#r%a+F1uwVFY;8+M=E{7B>TB3YwfY_ien z+*X2}g!5Iximu4A;HlmzsjBTVD7N(1*Jb{3d5;v9G+J8RUPwzi*a^H-_GIg zq^+8Z{gV=ldL+q9X0Iz!!*Je8PAL2pyZ9^C>enDu&yia{k64=`(2z-J3F|{(1uQXf zb;5S-TDTNyojB+DRF=xYQLYzGzI1n{EPZ{sxBjSxtIRZ%Nw`_*&>Z&I@{(-v9M5r| z0)4^EIxJQyhPz5M2}R6pF3K~-KXpT?~@ zr8~%K3qh?p#znn?-Phz^(@e18jo9$;mSvgv@D4M`kQxaDg6=?N{hr4uTak&>W|54$ zm1nHIKGJ`D|Js z;8md$_yW^Fvw^f^gvhYIT6_6d>DkjcNuJ}8a`Dq<1Rp$qie;czu!LMiP~)!`_?l?8 zRvg|TiJ)6Hx$w@cN#9y#iJ1iE};!O z`O?JS1n5NZo5^~jT3@xvQv;inOCCzpQZlE;o*VmA>9#n3t=ya6Q%G%xljDrEZY6;| zJRlq(L1i8#vK($$&MJXG@NuY={KSpWPCEf%AuGz7$2wt}X%|VNmOXW#{l2_q{z+7- ztP9~q;ohzHcQsaIz1d}7fY?VPVz`V{J33B+&aZ*%6ULKB-4P3^Mq-r2!3_V#v%*nt zG5bbs@Xj6~J9!^Uu-C*MoKX+l!nj98-JRJ=(k;`h%O+lebzuX_;X@{sZ)Gk)lS=mr zr4aF%;ASfIcyWRJD7n>^XD`4My{YyRtiWJ9o 
z8(DuAPF_-<)*g4ahvQrBE0&oGK|5Vk0Wx6g*F!zG$H9C?la*!@w*p_ z)QJOa^wJ4>jmv!e?Ukm;!REP+V<$;ST5{GEO-5$1M5TYm3L>_)(DJwP_xLw`xF?WU zv-1S9JbGB353MVG=_f4nm=dr4Vp-({8H8l>DsVk>?ATm9Sl)G^UF`0q;P?q`HJZ#WGd5Z)nhJ zeQiF$coJ=RoB zN*1?mnFdYh7a&0SgIn<2I1ml!S}eE{I{fnOdAu?>z0|C@Fdq2~xPY0+9DhoMw11)eT6DGWyoDraIpT#6O&Z)DSfzzU*D}cZ7(!^6@4 z3jU|({*dH@YWQYBA8(&#qTH$t5p*BfC{EbvPDnC_rt82Iv`TG6w!tur41{Y)4}lQS zDZ_T4ttt3eGLt8hX|UHox{w`t;JUpjt|AQ0)Tq=&6Z{EiXF)9@-Rc??FrHy_iVKhV zh)vqElnk-UABwcaTxAycG10fW&7#wOs*|ztgrX6|6+ZJQ6e;*tI!!v5(c*WNam`y6 zDijG5^Z0uC+=pbbyK3!@+CQdjFrgZl6%s{ALjN_uq5dCJ-yH~b{Qr-`RaR1TS;e8U zGP6~7R#YUiN6N~~CS_!GXYYz=kWhA<$R45WaW-XBHs9yl=kxvke*g4O?(Q|7`}KG{ zpGfTzFAVGY5O6cJgA0dBR7I=zr@VJ@#@yvL$3Rjb{er{J4&3yZh=z zvt5d$eTVMp5uj|SUL2(HDgED=qR{{F4!^-Jq{G-LcI98(qX~>Tzt&gJ_wOx>np^z$ zp7G4E*;x}D%JZ0n@f6$#%-aE_6opEG01BYL!Yo!pr@Crc++QgCbUld)d|k!b>fm4F zN{zd{Tg8PC|h|o{LxtSZ9zWu zgTf~l9^Ye{;Lo2H_kkxt-e9F5+Pj=xgiTI4Le<~w8pmmCZ__EfovM^=5t{gq(g^{} z1H=qDWZULHRGR^Y0Y0@l^>+WMmzYB(k!(_VAgOm1OS`ubJHKv2oPebPqXg+rjX2&c zpx`)(0&YA5Tu%Y2k7-OrugKSjZ{RYUero&lG!ol+uN{?L>bLLw*p(14SX<3rRg!uG`6Scn~sB`v6I@MQm9oV&xW5a z>aQ_!f6vlHrlTS-_&iz|819vdfd^90?`y55`u7qWg-WNL2$RsrQ~k|e@ZA@9<%4O4sXWseJPr~ zxrIlGPQdQAwhUq^#tMTo2 zME^j?TNEj;vuiP;#qJ>h)AD}pZlS7~tojMVL$OsCOU@~H(u=)6xU zcNYSz-oqo1Kh(A{kki3{wb1Idq^EjAhrvX6Gkw2``o52pOEy9k($ywy02~;+;2$nd z2@aYIK)+3oWvXJ%yY$tNd(}^jaiKt>)kNgUs*FJYNx0~7d@m8zX#sm%22_Nb3|pno z)-NTnmUi~|NS1!vzt`&EO$#+k9O@zJBfvw3?ZGmIVxtl9v`hl=G_0G!?Oqa!zAuty z%~*JQDwJAXzx~$T)tQv2bIxZwPz?a_rf-rvT+?^J6ET&ZN)%1b_nO3dU3_h#@Bu8>p$RqFZJ1gxJdpJ@v8Ka26x7m^N-2S>EZ;q(MF25Gel z@&y+8BZ(hcV`wy^0$u#`%9VQ}x*q z!>u+IuoxyC6%!n%1rUmpI7ot4C(V8`ulp7^(&VSG(z}m69_P(l6?HC+N+WDL5t= zfdyBmZTJ9tO459f50ppvSOwLD;e&E#mGv_l2IGbf;t|>x!Be3TYg&kqYs8oJiLTuS ziX-n{`qQ}#r0YE7sr^^KenhbSx#^O>dfThdWL1E)c05YM1*Q@D&xESRBd0|@M5zI% zHF+i5y{aiT-3^xi9Ko@doo$U*s~ymFIWJGL&kH-?8Lt7=@I}tKNm#7Pi)UGrSp6gZ zjGCK!glC>Mj@?!yO;PtJQ1MgLREtg4J?A;r+rDeNqMQf z^BKnlQ|rNSjoDELso>Mac@oVGRJH5nF<$*DfpqAH3WhOWx8X=HB8QKV2;}27-{Yt> 
zZ0Iq(Dtt0d>yG*GpnioqMc-Np_!K{Xu4koPxeYvE%gNs$3X6ve1wgQ`HV}}(puaNngM^u3(B>KQJHviW8FCp_R2;*krGO-#`A?&)a3W8JuffBEX( zf!c6i)gs7AC_|UdP}MVtC{yfGo`>_R`@HKU;$oOlb1mPttFzgb-Z+)@T)aWdiUXH% z?AtO1dNt0&7zFrMBQ!rz833%q#>KklWqnW?yKKyr>ug|%J zD=8MHA$M?655ddlle%58UiiU;K{I%jTq>G4)h%QxUp^$R%{Omj^QfolpAR`gyF*x( z@bru@JUA}A2SLV^(kufv9k10qU}9}Pss|qvOyO4YeBhVpmoSH|x2KRholEoVFre}1 zuE4>^-h!mCEW>m zLn!#8|IChJ`);4+{Cr76|D&;CRM$pe{_J^EdU_Y+w@wQ?JzY9LJ?Q*lhkJB$s3OvU z6*4$nfq7qUBK=gH0li&|Yx3ufBTirnK6fL@v>j3esspOUp5nO5rm;0_OZ@yp|7T&o zh8hix62lHDJd6l!&+x&C`&3R=z&Xl7q@-VY>CW>|tN}>Ld7>obN!Yf3 zxStuwrjj&3I~yfni5bU+)O@tdLbC!kJFCy7y7xgs{A1?ODQj{ii<#e{-lSZX2lr}B z2oH@w9sV4+SAPa4krx)wk97V+sUvgIfQc|G=hOnQl~m+>1M?N+=(7)3iIkR9$-rXSkb0na zz+!QrZTjh_OJuG(d~R`#jqOUZSgU*D=YHysnECUE=#M}o+=u2HP((mVWQWZXiUOn$ z-BaW8xV-tDZYeb!Gvh|QUKcC$BrQ{Tc zZFZJ7e|LG|&g>`{$g(o}o;W1_=sZfs4nH=4sHU0?BEt1RUClu6l9l!VFdf5%B+Er7 zgEtbj%oSa(Ml>)5&oy>G{3cp*W|{J#0`KAH^pVdgzk|CA*{sAdfMvxMJW}$87tY6e zta}eFik!V-6Red&B{gzznL3`Xd=qWdBMv_TwCBs zk^Sp>JKtpnP?<-}-06u2Ep_LC_t!lm z4$dZN8V^s+FA&i+1G$(XlSh99WUpd9qR`PmzuSg!R$cU?ntk}0T7Y;?SP_dMLfP~-|^$Dp5$9u0vZk>G-Ij z(;MtP9e<#o`Q(dik`2%J1pgjH?{6I@#VEKT=wVBD5{la#?>PO|ml!6MZ+h6(mS50G zPh;jDhj^gt$5MbcNY5T};)&4qO|j4zw#6Yge84>Gt54q_J)|nVb~nB5>H`{?1&;w4 zXt_~;;a!}1S33jY6vS?{$f;`ps}mkc>h(>I?2?v3C6Hdt9?79)pGEBt|DU;SQM+bG zRte$wHuW%w_@y@FZR0jvZv&yi8Z=n-_rsG(f_o1zCx3Zv%)c}87jBtPe)JiOx*9?nIF zTAqSz2A2zt&Vx*z#5MO-XhhTJ_EWLzb{frJzR{-G%mf>lB>M&35tN=n-Aey=E?X;A zOO)~ZoKszQXZ zXXv?#V|1CllKYMymjognEz$2;2&k~=t(vT+ecC+PEmU^t#ZS#}`M- zr&aha8YZu62%D>*Iv&R-Bpkj%P=nFHPJl^FAkije0%=Klb>Ez{*LNz&Xa2w!$g4ti z#nAE7iLc<1*M(EH(_b+32QM@YiD;V!ye^*F*kd|Y(#Yp^0!23y z%5-q$2~R^D72#_KZAxt{hO2U;b|x_j8@yieyx-t6-&pkERI%;dGt*Q3^mY#?P2?pGYUiAs|9W0t z4Py5H{$wsZj3)Nv#O>HhSr05zhnl|A&s~d0QmHG>b%m#Q_0sR{k<(S4wzT>4?@`ma z2@Z7WFOVqj^r@+G|KqrbH#`#bO?N=3w8;+0fKJbmG-k9kXSDZhz%!T+`YNm&ow`N@YE%u_sj#gJ;mBf}W zcdV|OBcfg(1bxa>>oKS6w>gO-!I`ST=hJT~PSG$GEH`eE)Vsm|X`8IU6Fn~|dy8#3R&`uj9m-V(~? 
zwYkW@wWhKzCN9pKty1b}AkUh4y3Xq?l{MdV1ZO)}Z<>mK7CRqRPAx^TrYPg9TuX8J zsyUCxTJOp0i>IuePCXxtW5|#eUMk&dXZ7@JyD77~THQBXC31E2O;*4=SfDXdN>Y6d zbGbt;k?=N&Fnr}5loj^5V^6w#Zy0IWwQ>op;TKzBZoI=6OaTb!uft!5{ss)+8*x$jDh?0PPqjr*^irSi;k4_}$!pv&1zD_y|bv@Y~$S_xCH3Yf{_4cPompVR&9nb-}> zy5Q?ECVPJxb|>bfCrp-{PK2{Q=598n7qM8?_*lK%om#o>no8UIV+dFC>tc<~()7n0 zsEPsihzSqg3zG)KRp^d!m?35YiC(cw7YN?$r8JML53fB=>*cBilZfM-9g$%8z^mM= zbdBk1PgPZ?>0LE+(Lj1m>A?a=*5GZzMEG|9?V+U4)ZPg!HY}cOChvEp1afv>8aiC# zWy{`QbUYCjOjkN(y*we1lk$*Pu6?V)^L#Te+rZr*dkX_v<2yT|C9>-&ui`G?#l%Sy z!13oJN{n>TRm3hpPG>{nkJf_3x@&Zdhp)zaPib;`rs|=(rsx^gyJCe7-<20Pw`5^N z<{1s%xo?<1HhwJ=rRKhN=@MZ-zOgE%HTk!F@n{nA)x{kAt#K?Yoxem};J+;HzU!JD z$bT7*W4YSHC+N1}2nlWF)bv*#<92kj{I~VJR@VN3$|S?31B~{=Mb+Zpr-X(*6n z4s#Uo5qjVwR7f{ST%a(x{&^!lnJQl@rCU&nzRz1-b2nI+(c4?SjWbevxp6c@H1_$L$D9%|9k%oQ)(E3?2AdzI_|8X=#h$WuMyd=y-8Hx;dYz=JHp! z&!PvMiy$I7@R;#08qd>rxMA&Y*gFzR&5-Q1kO0ZTsx;MaQk(EjF&To_jw?Ov1y`u!Yg#>j7iB z-`Wa2D+8awgw=Sh;bkO{aXyQPu@zE{3ZNu9BexT$kx_Swy&|zc+?s5sow`%f2!PDG=Yz|BPR8_dsO!>wI1=CS} zu@5E&4QbU!x_UprPx_dt2r^$cc`553GIu_+a3enD!uL0D=u)&9E+>zVI!`!B6nqM; ze=Wg6%g1-tII8c`t=GG2Th)(xC;ZJ-{)lsqhGd;ea{>~@p)jVxjpMEyPqYD&SOz$a zc#5UH<+4=g{S`t*$ITms>*5@C`Pj{^AhYOo@W(gYTc{f@nITo@Ap=Zq`0$V_s(8bABI`!X23>}<@J%5faQ%!4|h_27M2?qz#X7Pa1K?zzhz7Sict*SK}D=UAp;Xf24-2i40@amsf7>ij= zl!Wg@$v8D+?ru#hL+3*d>F}PF`iKybViwX*_TKkDhSoZNdNdIRD^7{Ri_(U~JZ!JP zQAJnBj_-!`xl>aX_mk|LS&eyn-ixUn756^j73 zT3{+8(F{Htx)quc=q67`aoAcqsYiBC)w%rjWbxPY=|89`{<&K;b}rp-St;lp*RUeX zo?YrHSsulphKf-3cw+b#(V%zUW3lmLG3)MPpyA`yNPjxJ%t!QL^)p+y(p+TO<#wb@ zOlAnz9diPNAIW0%k45Fm(^$z_z)^3^JmDh9Li8&!DWG{_+vIr3LP$2W52CS z-Ygrh4^NVl_B&?jqpn%W5(We>T6DZsXL&_r_5M7&-C$|0ov9k{LRm%Mjcpb)eR%h& zXOWvZ|55gUJ1UhqW3C8H|9!r18EpIf2R{3b3Wipuqj)4HU74ce%aQOOuP7d>GuKI_ zWg^M9dmOklqNCvL;j@D;9%{U_$BY!_^1y1US|d+X6Ti7f9n;=?9vKG)KRD?0DEU9< z1-?%Eu+TP0YrQM@*RQ)aAi`U=(|dqrjKs9@&ed|)_oz93^8NNcH`f8(5{3^f5N!U-FlWEqpWleLpC ze_@5~^7#j*a?7{kRZRXL-~aOEEk8pe)sE*JUSbiqm0PFWA8olYIsbeYfH})##`2M#E 
zLTS@Ris$$BN~0?o!3E84S-k6UkMsQnNjg=@b6T2+@yRs{Vr|lfrF-%IpZ4c3Zt)8rClX!&+Ecgc0Jlj+efgWgLIn}oVvxk<$8(iX+RVznib)Bt{S}|B z=DVi_wNRdK2ixu`6s@A>7f0C zx3iXd)74z}+cbaLLP$`3lmTcgc+FC%0 zH$E+P-?Q~SiO+~{Tj)dcxgSx-@_Wd*GapPHQ1hp?m8Lq_ojzS^H|APC8@-OYEFN%hPx}5aQ7`+dk4I; zRsFO`54QHFQa`V|o~=r*E93)v!0hpHLM3w4tgZNRgO6sD0E0K5z@4Otj>x<00K2ta zN)3$&gO~;*0zIEmel*1o#icL2>UM>0N6u{&Dg6>tO6E}oqmwpxNKI^|sb7VJx{VbxbZo5=>PCHk=(s9b0)0WRw>C32SC{0} zTxQrv7QJ2v&gjd(aaL5u@{rBswPW~HNGb_?&|*%l{#uyo0D>mNoXQt8C51#`bjyZO zh_N7!Y#Yz6p;)M=uSEp{`KANAq1`k4X#bi& z%nklk9mL`kBM-KOv{gSg-@LH2yupUOa=c`!&f7b2wLd@ZRrT&1TOI+VZZWpu1dbYc z4pRoiV`;!BSue}1ORAzlE2HyK9>K&=OD^UVRtCKytFGspos6pjry7h}f6e$tbRF#; z5%b6@bD=94@u!N;fBbm%{K!i2Mm#eP_sI_F)!c87PD59-mA<^#_gP%6KoP5e6m}v2 z_ST3dW61*D9=Bw*dfFK%&LQl#ohF-`&S>5;*?T*RrEXf*QRju>k-Cx!#Z3BCTApIo z36qY!NTwdSw9n3#mx3Dar&e~ly@jWG(&Nyfl0|wyOc5J{I6AYnZ8CVQSTxI7pl!pYQKhf^JgBQ4;6Am`_=0w(~T@$15?*9bTqSF0k z7^ryGex^jzN3An!aXUM?q_CzIv`30<>(kQ{x8Y9hk4A8DU{oO;VTlE9#*@dA!Y;Yd zV$Z~_3%uu{{~7NB-`$33ynaC*bS1WZxJ-`0xeDdgj#u86lvog|{%(#!@b7zAQsmB% z;t^^U`g@}`$Znqao7ml7>w1^ruM-EJf+I>!T*EW4n2eQz7q7|fW@xl*O)J^?^rQUV z5>Tb>IoNN2RRNjq7X&<^z_Q27^PSPMDQU4Ur&gff>;E5HK$GfczD}LZMys_Gg_0W$ z1SWv+meZfWAy^xn2SE3hP!4G4hM8mHYyIuyc5hx(tyD&YX1$Wr z=K*h&DP8j2^(4KTu+7PKlQ;$2-qLu^pha>yUBJX;nbVUZ-(d{+IZXop6076a;?O--+{I9IU@c#(4Jc1T@1zXbTHol5Rq^njSI#8sVV8qgD4WfMGOXp2jmBEON3h%Vist%3^R0LKf1&M=rVZ?YE9TJ09qjM zMAFO4JjCk&;<1!!(@<&fes1~FkNAN=_h%8Icf@mdAoTk#Sf0_l38Gc9`)6FeU> zF^tka{#%-L0++@6yQp+Il4X_if?*W(0BI_;s`6@sdWRa)pVHvlAF4?d>9~4tcR-s= zu?WrqR%|)HPvTSFZZa%6fL`Vktft(=+=%^jiv#akW>)HCMa!_=Wu0@tMtlOg8xF_a zxCucqo&uJ2Zkdh<9sQNRanxTc)=`BZoI_ZOmB0gA^uJyg1Pc>92VLBsD$1k-qMY~G zOm$Li?BeGODwJQyn1JPxe%LWpe?V3`tg#UsnmI8e%RrLMbix!RGW#7Oo6v|M7s8{k ziz+0LS^gr-5$HnM>7layjXtQTr@XrWCdiS?Kr3m9Vb~$Y1fDeTfN>=#j!!iL4lUwY z%8~HCYqa#xFMRTc-8elmqG_{*f#_ zCT&8zOu{^*#Wg~4j#2>3lq>>_#~R=L_=o`)1jaum5Z&~`nfzrmG{shqh|PE`h|gmq zk5bc7ycZM!1bEr1^0Kq_uw z00qDX!HHkE!r&A!$2VasywGZv0D?uv9B?RC!mQ+kqDmi*Wo9A@0pe5?ilb+8A5DA` 
zDP+O?L_daaQZtm~;h2D8mTE8{_}|pE;4(FB)Pik4UkbAB7E&ziUm!HVc|*Ir${0BC zhCp`Gt>Da1%Qtv|@U_v_8rGIsXUiMxNJpq`q{E9ag8vb|QXIz_WL+4pJAeTH#bpUI zFiNCa9C|gw^J6sm{xKE^k5$DlEPg>`6S}LOSE2k%st=guMIj1*NDB&;j8C`Un`$q1CO^wmS^;jf09nG}gpT?`tLes? zJAHhnyh`gxC+#^u3ZE)a5DTeBRphb$#pC@j>hum_=2(U=^ciB#70^O^%)q#c1OTapkUAyR(+9ceS0r_giL!E? zTBnyEo<>^dSxy#~k3*AY@aDoRnLg2Cz$d74DohvNev|~uu}vq9Qs01u-(kK>mrCkG z_oG6ABh0+k_RsC8c`l~=cFVU?5&ld>Dwwk@40K$_J(hHS4NM8{1V6?C3ML^?h^{mm zANI|=Y1L9lcW~HMHPa^Q5>UW#t{&eE1>zTeGN0V(L_G#acL+E?>@*$;B6Aef0kBzF z?|r;rcPw5(cPu};7$+HjXny?+7_` zD9%AZ*0q^n$!rOKs7@XiRB0DadAq)}89n72t3(E}_HXhD- z&eS?3s7Sx!(F?bRttdTar>npdg&`jQZ4ob#FZcqz8?-5%CQNqI_EiKTzPiJDFW)00 zC7{pzDyTTcZa{<6NnEV>o9^-5+mfZ`7gdzsQb+)^eH{;y<7|rfpl{%Vm`fSrI5P{w zo93-wgF?NV&mai(|3~^N0$TzeoSmTTevZBNDF3_UL*VsvSrP-mb>I-g8VEEJ)#nh+ zXaRWfShqn}3Q%8#!w~9&t{VB~nHwfE`8`yD9$=;AF`K-XMv(>KM(LP7w!V66vXmdy z=}ycAEByf3*)RkJS!cnQo%JK1Qqiz!B4>o)L|I4{>{r&4NJ0qcm39&>z8Un2hqRh> z#8KjT=%d9WNR$-geJcvv$UV;0M2U!_iB~*>nr8KtDb;U%cc8z#>?DK|=66XXw5DKA zHIzyKP=XH7M)_v1g(i-N*7QFd1{mB!+8@vjKM^42q4P+?1>Fdk+jDN&vs+Ra7mgTa z0mLxl^Vr{h1WQ^6gB&K|%2-zTqr(9-q{iJ6EYee!G&l~bar5g{AFd|ESz)zqe^NaL z0hilb7R+UOBM6~G)3>n|xhO4tNntNoNH}4IV1mFKvl&L5-?3m!LQosf1DqQXokxg= z-~i7hwk!+NHLl8i<+J3~THoctEYT7zKyKW-uL?f{V--ngy8@&qj)}R(Jgul<=vc>T zC_`!lY`ovXFd}9U!cH|(qz61vKikDJ20;ED;(1&+jbCV=S0FSl@HII28lAe9oLX25 zhA!M)QqnE68He>IDgAud4FTv<-CqMk&EQ)ujw={&g-Km773uyQtSH=hhA0}xIl%)1 zCr(L_Zf3$d!o)xPs%c`rmu+F0Nfl5H{qsNPsMVbXhadzzB_rxxnjeFr!LeP<8vhU? 
z*LiGmKY78Clvs_nh;_go*cTHL~m~WvjGHeNW4O#l3k4^FsyJM(eXb~NVzS%{M%Tt{V z9K}S%4jlf(cHnQAsiCcb#^E5~kdKE~_V3_KNt}M_@u)cdde9_VL?w|4scbXsNh4d# zjbmbo320%`UndK3m^xT$APoWLLw4!Io9GEAvXxU;IF$KE-Z^76H7ORMwS6q;2fhhn zS4$R%d8CNU*)EN131e zO=-UNJ2}o+6bp(R8vkE3P^u3E#}$DB+eNW>jmxq=v4$ zW(!q~7C>5#j@CJZQ;oKs<9Y9l4ca}m2}obH%Pt&8vr;bO$l*kD=;a4=!iLwb(`*^@yTj8@vWXaYaRvPln?<6Tj$8lmh>TA53XBAYi*RRQ3^-+hp zuzY|$TOffbl4f?87sGI9Lt#s3w38t;<#z=ODtvgjxLzhFGE&j|f*(qc7A5kRib-8j zUJ4=TgHter;Z|S3YM4;((jzRLuI>xWMtma+;9&6&>74FD(*fj_X%2A1ji*6YGDtIo z(-iQ6(Q=Fhr+Cg_&T)3p8iA*(0v49qJwR!>1WE_SAP>vVt&B`d{n^e~^*k^RV?;S{Rz*MFr%j@9<#BuI?cPdnr*O zjDnnag2(*NOg1h^-zio+He5YYr?|VCjNE4hp|>-8fHuS=Zq)b)U`s&f4PNMf!Kilc zzvYNGqsk}zeYa~sU>(*Km7yBZcpg>}6R3ia5wS`duObcRf$*+-lYO}#G%p`CZ+!~< z6S))9r&jnP+3rEs)QW+~ZfNtTu#bcRr(oPj7xbLR(SrwH3&;taLpmiSwXbx*ZUI&vg`4Rvy{pU%m=YgKiqXi%W@ZEY7b14tft^hwarPWAvbLh$T4)ANg5w z7`?}+rdialc9a#==m89!a!ac&2f|?!9~^H6dY{g9oSPo@}6u?4&bfhBPYlLC&>KBNt1c zOnv&YlTA?0HN!Qw)l|U+D5q6XtsucoU@88%&Uzo0r}x3?&Ruy8v?qFZE0GUBP_4yO(aC zj?%gz$I1H0j5_UZR&I2eTqc4!2F}B#6*(y26nJ zPlHwmn7O-ql$#o$kuAY>M<58@K$Vbv2OGL61&w~6d8fj;d2Qc=sE-nENf*fKxFASc z)Wb6Fh%`B!RJ&QWUJ#mB8VZ}5$V_-Fv}y>N0v3i2wMZESWcK=of|PqK@OawWn0O2L z=~a#}z3e@d#+*SJO;14A#&<#H>cCP@2i;-sD6n83s>3LQV z5D}LO(%>ZeTJL)}4M1y4{0)YHY9t~8NyI+@8#l=cV&+Cz(wKSCDw<}U-EA%v9HGVR zWDgr~gk@4EfMMbzY7uWihlELP4+A7bTuelWfpjEHa2leT#+CtKvgf}8tDbWAn=5UT zk@R^pItJ(og7apTP=jG?DeyQ=!Q{kW6od_pkmG7x;6#NXpl>Bl zM_YprO=`DhlTK8H{?fuSC8%DgbNpr3=t_oFxIuvn;xcC^&%@HhcV92ToA~hohL>T&-$HQhT?_gfsu~H zK^sf9E4*f8%~7<20?OJz%xdd3eJX5665c+->{#qf{}c4?l%PvhVI14Kb&og+iPI@H z#jCdfjy&rLPt#`I29v`qLJ|-2kU@-wY{!xDJOCoE6P4Y$-UD7JgA7pg3WeI#Jbx}^ zn&dcTCo=^yfJxAVGmZ#>^SQjzj~azI?Vow=JLaL-FVaAZoEK#-EvmWsiir#V3kEIg z!S_rNqT#CMPt|<)Y7U*9lej%Z84G8088Gg`>7dPks=q~Zi*z`9?FdVC|Mw@>ah&Eq zlio_i@#SE!_%9ItVpd+|T0tOk@<+?H;_)jTO~Hx*f-vaF5gEJL52q7NN=6{-HYofx zHkbzAVGlzFUm|pOnDO;A^yo!ZEUzaNxK{Rhr{G(tp(n?doeEltdsu+j492n=>L)_q z!N!qqQmy1&#w)_>l7#P9X7!GJyp&f!H#kP5+auoYZa07b?;V&tc(VKIxs;>_Qp&}i 
z>6?}GAf&%wBi2N7qBqh|0g4Yf_Fy3{@UWM{q)wqG2y!gpIig7lT;Z|Jm^l^>v~VP& z^$$BKW${gv(@ii0HvHl9u!da)f@2_TMc6YroM~X9Z!8(P(@GTc1 zQc)#h)?aJu4r#dST!#u<(nYdcaHjrH7F2x?kN;gqS}&(yMNVzpMT)qUp`cryVxWS^ z6fQg;9LPiP9_Be}?f2lNrt3NCK8RT>a&H(=<-#YBWAS+f^4Klw7jXWH^)Sbw=`oy%CGimf%z5K|=;4yc zKY3h;m5cRzfFq2?thRu2DMunv%n=e1DEmkwv9U-sz-_X=EP+XQ4>3JP8_p#YB7`PF zI6(%AF7+HCevCzj^@lDs>17;Q%Q>>XK9M$~#tEr|VV2D)Hd8YHp6)g9{zE+(~MIM};b>r^M617r8F)sAo|mti1z zC6Mba`M~2S5qDswEml|P9&nl;;DfvaHr2%VkzW^vUl$}9MSXUhBw<{1cNz$0fwbks z0)|N4#);E_;mUX%<#g0;K1f+u&wu6g;?6n5z~WXQ-r74}N`4=y(X`pQIW<#8%Ev@x z_4eZn;68+-dYKYceHI%*0u}39Zrwef*$?HAF1jI@v-=M*!1kbR2_y$3w9qZ%w#M9N z3IE%)9}|0a)anZZH$W;r$K4iwuJ=Nfdz%J!U%Ox~T#sY3c`Z%5MC1UF01F33LmAQ=cC?(SREVmuoyD=dlSxr#1L_1d5DFe} zAx+j01~WTdSVnalXf!W{xaiNkV1^;$24=^W8z{A(EneWVm=!Ftqz2DRzRcMzsfcfY zS!%Oqp(=TEk+S{mn!Y3alp@rlo&#{ggv{Y=)dK&AQqC3UthXj$E#q7Q@{xf0eDYNe zObwKv=`-H?#)6_WO`omjD1MP~Z-aA`JRY(0>Ka%I@Oc?mE9+XHzw3%Romz4+WG>BX zNvBZvq{Ij4Ht{PJ{}0&{xNmtvSCWHlEzl&9_MEUD2zDL__8cJ(gevQa-;mwQi1V1TgFOo3 zBuI4;e0SqAR7TPQm^S61xy5(yVkv|27Axoi@_zFMMB--x;T_Dv6?Yldnh@H%u8|niyep8bxV3^_h|K`w1}?#pOsZ2r+s49>%ozyFt%{R5P95QwE3A86Jyf-GSSen;6h zN9lWtWU1Y7Kn_F87$M54Z@c;DzrWzROAuRe0o2SLDW5$)ApwnGG^`A$~ z+s^&SkX^0g_S+tu37dPz$sb#g@-A!m%GSLrR#3l0D(n$-@Ka#~Rs`)9QCG(nPNg@E z$gp|qh@dXzNsYAN?zGyhXH+%8(0%o3ML9I^@2s*CNgFKw(L17L;yth@r(t78{0Xa} zLgeCU59M{KZOV%(`0@8G36JW!bYFL#7GfR}Kr>%1fp#{WgNaYV&&SM(vRajnkjTKn zA{@-dO@s&t3N1+LN9Oa*HGS;-ENIg^79oe1EJ>C3k3v78OICActuiYo?FO?=uU{y( zxA4m{AsFK%gn9aDSMYSIp3A9F7av}7zgkPeLY zU&aho#uYxn1F{?1$2#g;!Uzr2lglj4 zT}>d&41MIp47ttNO}DZspFRF<#)b0F&_+?O!M1~@>(j7xS+;8tHt2WcLjrGjsWyN6 zPI(;sT)3uKcTZ_FU4-f}+39I_e)?bk^!bWO?ncZKOwhf>0Iig^n%UxmQSPFEwF=o;+|Tli(MQv`#s?3VMmkDSWIU*$4n zTM~3DoGEt19c9MP2~$xDE!3xAWd67cQ%-gp6@}R6cg&=r{n>MvxCj~7KkTu4R2;!= zWCkdDeH|}s<};n3`S#XOiN@@D%Z;2|oGp9VmNS>u*ZWiV-sGE~rmQK} z(>s{a8QI&ZUGaU?wD7#^!GVYa(>Z6+WQ_~HHPJc}CtPsM0>v~GTNt1Bc8vPQqTFYq z60?i0dL@Ts`1~Q0^K5x%qNe1oE**)zcvIUNQ`K9 zd-BfI_dY%uRf@H#J8wPm?@_&pv7+)mL4k~ZusCh{!x&Xk&$bi^H{(4`<^3zUrY{W@ 
zdV>1nUH<*KKhQCqu*bJm>qxSkl`ZsjoiVswZm|EykC#&tn-)qsW*-NYLRlh}U#RBm z8R>b_w>L9NsecdbwQ%Yj(lIiSh{x4Bmq+qQ3<*}g5xb(I63S!J*C-BsyB*zh#!*(Qa3iqdOiKs5gF+nZ^ZrIgXj zI&C#jlhQ@}BFmgw@SG3J3LMxU@A>O* zEkgp=CRIY%~gnf;PL9f0mOlp<3A-8>}MKDXecP8C!tNX8w`0+wJz8358 zKA9@l20J;YCJ7(8nT!)Rdt-dHSohw(GSO!FZMYFP>bnC1zVV*RRWWPhLzTx>E~j%h z7&;~n?TweU=uBujwlk8g!`)S-Sf&-iTC zL+#m}>DnyLoX);_-V&EQK9)_VITmGTP4@Da|4#NW^~?0qd;deyRfa{mZDCrFF6l;4 z>F!XFkWx{R2I=l@X^CM#=~56xO1fca5Ewd#lO_~xJ=Lz`a6Q@E7Y`M^Q$?gQsA^`rdg_k<*wJv1iRa#j8>fbd z&rBc8Y8G8ww^vT=V!{Z-ILgP1wFlO;3``f+)KJo|SMNJdu72dG&ue7=Ov%%i$nev7 zlsDNI;j;d(@9&pYm2c`SK@4^JAw5gr!Z*mTE#k-bBvLDmq#70cW81Tz1*fa&H@!)2 z+!*wgCd{)MBF26Rno%H|(TsfQj2$RDj#tLZH{pCzY~K$|DBx7RNisrdv*}}*zS<;m z2&x``{!dK^g~g-mzxxyMD9p|yzxVb35Q|cLBm$QVMusCROLiZD$O(;9BGhF~p#_3L z4V8`rn-3u@@rdWsZo>UftM0S*JqyHVt?9$H7cv&=mOmV_7of^qtbJdz+KM*Ri$r~YuwI-;-MdnC72j5yx5>1U>cB0BD-G#rUcaG`7|%HoYU<`)Oy`e!W*}wN4=1bb&^7iHVoa z|4l%~gFJ!8??i;+%~W9ZUA1jWuE z&YgV1U3jusq?5DAt-!45$sXOiTwkb;YBX>q4qA%eP85}PKcw^iPUIDfKPFUpn6*`P zv&z76ai;O;kLRZSx_zvZWx)OG9%d1VZ*>;sEK={7{_{={u&zujRTvXN;nfhxlg&PYB=GveoIf}Lj^J5jBp^4i4 zxMo(xa+}^XwyBZ+|1A^VNXe{POa!}UOe}4!Q+w7cASR#xU|g$)}|fGNzf9((ER0_ zMJ6BP`C6<}Oo0$JJSNwh{0K!y&<#SwOuR`9 zYVgHlM|9eSS(v7WCzV$IHJ|-o8iB;GJG4cwi5R`n^T5+B*V^~}3GkPZes~#+?Sl(9 z4aAlsKFzkxu#ZS-$al96O`QApk1+%d6B2 zXL9JjuqRaf9I4b(=C_3A&gCTla-GtuxAj7{{?i)mdH5*lK9iFG1u0xdO(ll%a71m2 z%&Sf;GQrE*Ta|K+ZL{mBG`=Wwp2h8dAS1WVtgcl@jjSyJP@j4c-F^~k$sDAxH2Vf+ z2eF`jE4|L$gQI65KkaUDakN2dbFsXTNQ@n(RfDtdv0;SC8u>{^dx!P@k4y#cFZT8P z7VehiFnVqk`4u9W+x9|@=(%k%iG~5u9oMwy*J|*&mE|FBosk^dw6S#Bx8)UOFTNkh zxq~kVnwoZJlJ}+WYOIu~PgP@@{1P1a8++<=LgD7>=Pb)oWPrGLL zDt6%8-zF*cO~+H40$tu1YYXr0LN7b`#L{bvV)f4ACU?t-^7J*}76H0DYP(b$nDe-9HLxWuYWruHZbPABrkgN%kWyA;z zvZDT&IA@pkariXS=WHm;JKaDSRr0$xNY1q!LGHU&%fSbI4s&Fh>+MW~Eli&$l%X?L z%q_OLJ-y(}koS%%dLA5f;9UPe$U+D&culIoYGJ`MH=iL1y}_fb_g>ORs}y6gf6yN) zuy@1B0#6GU5fZA`pCo1_t}2|5x#4B2>fyU|1K>Ig@lxsNtt@lNQ|>}Za*I{!$b`yw zJgwO;88`6AGek>dh^~)s!UpicmWu5LPaO&id88jSmkVf8N->8ESl};h^>qsX;A*bM 
zgW~GmP-P)d%%c-p>R<%FWQ?2?khh|?m< zkXT0EHW^LeEuvan=Dfq4YHdy>>5p4;#R(5=1NY(Kh3{~Iplr=N%arhSwXON!kr|&@ zGi}l^Oa!30_@w%GksRPY!cXriDib^IB{s{w{=t5uvL0i8IwNkNUCWi%ZE3q@bX=`~ zMbHD|#XB3Tq&=4o!|(mqC=jkG>cLtkW_8gBf2qma_yV3+9P>`9DOPBEJE!SgZa$U6 zPWB@D`_;#8_FCXYOMm~1ZY?>O75U511*|=%I@-;)G$=u!T_|kF`KhIZizj+4Nl2Kbap2`nx33;9F05=)mM>_>RPspKj+RW&cOc%-&U0MrQw6mq{# zKZ2DZhLN~t_F=zMLxj9X_G4KaU6}T7Zpq!%8<#Kv>)MLnYMl8+E2a|d??mjUyXV6u z&gY~@HiS4vyBl2;90%uIYL~W@SI!3=OI`-XnJu3tQ--yM@Ok-e#!i1V5J-t!K8me( zh>Xe)dI4D5$RxlS`aJ#aA5lIhnC>;$v119nA+$Qpt}+hbzU)ogw5 zw5{6)3DOGc0Xu4 zsJ+CIMXK+_^AB2(y{2BEB_(u%XRz*Lgwt_ZSj_hHN;tk|eR+OMJN(z02wHqf&b}@z z^)%^-EMFV?{OJ9Fw>b(dIkH%Ue@$;FE#7sH^4R}qVsRa_N$ug2(Gkz!;HFl&Z*;YV zog`B(iQ~fr@jo*~y>!_BS8Udx-TWI1QJo^~#dv?5Qb8&Cnn{@ulx~;zUcJMbJj76g zlFFHThiq)oMR%@V`+c9nsT!{Y(r2#PO>EAGaq}%LE@edF1zd`CbA0$J_k?dg^)FuIM!ToP%o_OwqjE)!Mnzu;P(Xz_w~GRUmjRANF*#hfeGncX*8) z6DaK_lkuH%S8*R(8Pg2UDz^A6tOv@Xv9+&F$+d&Sc~|RcAeUm!dSU4I+hE!BPPh{@ z&p5_tYZaTWy4=rp9X!0_LiXXw^a3+CCjde&N!$x0jauL_w*+{fcqvk&j+`DwyYhG> z4tyFr&`Tw7Jy?@yu(b5O$NHVGPNy`DKZ=SSTGURzvkE@<24m^_*y!qUQZ`pL&~I3o z!Jf4s%WLXnw)9Z}Z$LWHFqoDB7o5<>l?HnXW@Zu_pu^P!9tTdjb0(RPHa{Kt<7MLd zo@(8A-Km|I*YD{{bcBX1+R$6wcDDP99Y2TnC&_L$lYhVhS{4*LZHa{uGjUZ30m9$~ zU>Qwaj)F&89t)of5>kGwiHckK?(yd&8B|Ier@a{^LOwjpsmZR1-Rr|-g&OW%1Bh}@ zc^PLKj>*x*UkmuZYMT7xAeAoYg3N6Mi!3u_Fx3DC7Bpv3taX$D5=xU4|N{5KNjjsD)*=&eeK$`@rNsFY{yn1opizA z&L@$_tdE0-#M17&f4lgj+pYW^{idVZGh@u)9Yi@*JFn=viXmoO>#Jy=eAc>mi|c?4 z_O}12vi)`v2nH>f9u;>nM-a!7$}0?M*iRn_l(YzmuyW6=wX4^#v9cljgb?O|FTA>K z7M|DD(qN?gWN0FRel$YG|0E%D`3l_mWb~upNPZ}p?pq!#*%zK!b_~P&yVCWqH{D~x zF@I1gH`Xz@GR~~RSQA|)V3F%=tlB6wqi|n7c?qku!h&GUCQBmiz0LX2ZWbr3TFg{e z%$^Gt{`;>=rc(oW+#&Pzn|H~ClIZDs?p(@`XRldYhK#uHSND~A`?u2fJ|TwRP?q}b z(^t2Yp|lGwk1V$*ed`<=wWwGfPlB{8C|?MXsmgrwTDan#uN0h+e$yHMz@O$=95YY_ z6bhbN{Sa1ZHMh7-XOV}BdKGTh{vS#TmOq2HKIGdYl~n)XJRr`qic9_1${TGNKomGp zjbZ?a99$POT|jsU9rj11dD=`5w?iBKt;jM(z8u4;!E@i)6+R#b)}mExu3;BdF*0C` z2I-Nb5#C>xb$Q@bXO(*)Sy_z98@P_o# z^W!<_P@|BRJ~Id66wnCTYD2}9cPK!K%}@MrEeB3hq?)EJ>mDfa;b`~E?sUCHO)MQq 
z?t2+kjSj~qk!bv(76LfF=&>V9;ao_YmHewO6j_6$yZj6f&w0RZI2ycf?$+&dXE^Or z(fJ+jGD!LNCq;DFfq%@wA+z}tZsUDSde279=b#j_mBQ9b<=?2>uSLNF zyuT`ES{Q0+f4Mg#nv&oU3QM00rWyD)BLt1wPmC~wbJtd!;YoMw)Uqe?Qr&kL>*B07 z8?RAb(7`JgE1D0?-j=(JL0Hf7&^-QII`Bk1yD!6#cM9#=!3PZA!J!jwN~zP6hcf%q-V-hOgPDR?_}u%P+p#q+ln z6=Lfp+_y@siF*Z4p6#rI>vjrM6Fner!qM!V8e&Z;QWDF}uKno)+@)|AW?Jv^-0>pL z?*4p7M8uX*y)^wDMQ5LDjdGnfquo*vbZ&FaXL2OVog(rD=(g#%)ESyK%u))8r6ovR zh6MW~^l8(+jnZ>A^tQsiMnjX2Vn0q{v%inxg^-u)zv>a|0F)<~cPpSiGV}j!dx-Jz zD16~{gY!$*O+!Ta57hP$j*?GZ12qDcU8)|q(n|M-q?;mLDe<)ePdIQGMLh?O!7=hy6?_=dNOl5M!`o9tU#| zHGzY>B_-1ZeV?zU<`?RVBG8lhP8`MRT3}HqIlR%?tqIgXX>qLknoErH{LF5qXXHqx z>@aj4g;$E6T@pu3sw*tZkNbG4GtV5DO{ZtCSH)IL^WAmv*_rv#SRZ$`zfX!aSsM!)hB8B?*{c&0 z{^VDuXc;VYjeHa8*7ISN72B@-4%8(g><~*!TZzpD<#;dL+M?f$x%0rwk@rp;v2MZl zE&K7yQRaZFW?Pxf&E~^stsCz5GvO`+XR+oJJl>1~;SanK-RsL^y7m@VF(PjoyvZ9D zv|lVwa8-_tI~%PN6d4gY=!gd;ngbtX6X4St9f6|<{C8gnMHeU)EC|0j{xb1yO!qmT z{MCGXwxL9&Ru>uhZt(#OA9;r;qegbTbkhF$-~x(p>sfmFBHCjC(YNL1c5|NM)-UCQ zp`9Y#6}#2l+$F4o_-wl{-qFPsvoq_Ljc_>^af|zROe@S7i9y+!1X7ci0QDmSmn4pWs;$T9+$XgHUz`7g{>bi9$LnYI3+_WbkuwkF@Pma1W(wH1{@&Syse2ytar<88lue z+W_}nt1iKGX?CdHT86-94&3hegp3(G27Sd}oSrTZkhRrCWic*wz7#8~+jQgtl~PvD zgys~V`nhxT4Qw3nr2*tY9iaeFFF^0jyD7ArhiX)NR7(lBu_6W*-9JL3lLeBX>jNf> zD+Rgd41w&Af4b-*x=uX7jt9b9uDJy|e1m|4EZ2U(r*hvRS|!DVrwPr$+S>J%S|w|r zbKZkc2`bgvf6uHFc}%u%v;@DfR{MWWTuT%M+76$V-iH*NSx_pRZ(@bNP+49|J%KfUx%x64*(y zrPT->T~us;Y_VFv8UGlU)w(HwrPyNF{5veH>ufpoM?%Q6AVMuLT4gX=R502HK-@qC zhND0br>vpDpX00j@NF~hU)89omJ)JM!je*yv+LHe6bVwra19j{JUi($PIS7fIeGCW zB<~nLyu0~kWW$kW;F&nOf5Dk_b3q|_H=ITm+c9wt{@pA*5v@m&IGC&?nn3MF4=k+( zYPZb<{}p5G+~TlDVW*GP0q z88(Q&SV0IU4Wv7oo6a`09a??HxO(rKG`_8VV-G*+MW+&fB5>vB+sPMDU ztVFebvZD8-V=L9T@9ehHBScB<(6Or^I7O@&#X*!Pa?C!sqB6sH5vMfb7$MWre)PVI z-FLQe_8nZ*jp){S^ao@hz~uqa!uEIR9pY?o9kZ?=fk0$7AebM4?nA`xtM1jZ`?#4v zf|@0h0=-%degFmMk%&%;M1OwrVUTh8KZk&y;1JGbPE@)`a6G%CZmEw)_yKfw|D^cL z=3!Y!2q>E=0F*Dm^5;r&N~3&-YHZmpYfnD&rF+c~u|f7VtbbF`nwkt{}9OjZ~j#wLjGfh9#2p5gtP-KoJ6M+VYRmhcPQgRYULidno=( 
z&UEpNW0tQzZR&2kSjv3~PdJ8CwTd*ma%a_EzB5z+K=*-mAh$+oNOl^W?2uu-}77-na;NUA~ zV+`qUvl{C@+C)F<%@X{1%!hEl3sEe$^*nn@U1KZ$SKYSJ%1X8Ix9C+@6^aMsO zNibNZNU%(YFgqZvVuLirtpBp>9%ou_e*vcK`YT?njJXk(9Hot;@rdYoUOc?f2<}1& z*Vm6I2I)E>En*NLsA09Syi;dBmpxkd!S zGTaEL(2sIJPnxJmq$=bA2c>CunnUBhG%|m&o#Vl~SI&{)mwYAq2pto)js$GM^H36w z^X#X(?7LqNZ|iQmsC*aWopLYIiCN%tIO$N9C7BQgnUAJdYAX;cYPgHz=a`~?vb|b2 zHpAO~?c98?9OLfDk1b9Nx;6XVGzJB1M3vZKoY4nmpM!|PjFEfKpfs}Egw{ip1VPYX^w z!~CV&5LPc#xZol4;B3ddy+Xt3id`C+ez5I(t3*QM0nosiO9S@i^@;u01Q2ZyqpBU= z6~Fr3!x#%~FTd;&N=Wg5JEEWky>{)0pvvqbxs^MIvJPkG^qtUPKVXCIss_icVsKhy z2!~|N`7mj+5k%PgH0JQ8rb^V_pGRqzgJvV?(#S~@l84%or)>)U2iq?>y?rQduYV4= z80u8M**W}UM+u1W;H*D2N^4wbx1c4UOz;Mw${HKvMDDv%uv}7s0Ks#<`&h!05; zGJT>g7ycGwaXLQ>HOS0)Rvx2>W0u_#JT3e5I`1yv@&|#j%;jT z^Vz$~;I(Hn%H&ii|29U}u%-7V_3G`B(N9{dakPblvCN&%Gb+4R779I`?j}g7L(B53 zL$hxE`SJ%SJN~q4@65Uc(lrqLv9e(aLuIHle+al!e=>jEoKW`y2P@Zj`?|Ed_)}>@ zFmREZdiPy1*wK3-3=KwC<<}3v_wMw-9z93uf|!!1xefXn{2Ma<%++?Ie~k5g3CS{F z2ozlCyydU>F`&(2hz|)j(&aeao}@o&w5QP0zoQ3L{~;!o;O)D-IZuK8yIW)Y@R*oE z@Xy^fR?ZmiyEBb=JY|F?yalU{Ukm?l)nO0``;5-q!&@LdaZ;y=|ME9R-xe1B16ar| z-FG>7npFWkNa5r%$^;(|QauM1P&JbmW;I1EZKa#IEBNN8z>`+qAAJXFYk6;+J25~g zF?7iR^+jz&HgV+2p|W!)cN{d8&{PY^ZJ3$XZ^&RuG= z6rW)`JK{N8#rA6f^*_f9YDz(%EFD1_pWC!>=IjiPq|+HiOMfAVW6XTx_=AR;?NUXx zPQ#wNarUvJOjx194vEoa{9h!6*N3y}uD86({)y?}ElzcbZsjnm_{yCi^DCL;(*n=Y zQDDwFE5>v!AM@>%Gpj<5`E0#6xpuVA2!KZ+<(uIwSsuiLjx>ii8=~7!`G~|m0FVGd zo2=JYG$f;v;@?jv_PtxqSG*3FyqS76Z_peL=o!ORy5sVA@ZhEaq&4tvGnzKBy-F*i z?SN?-z=@nu2RsXPf+4ea%1Nu66Aspk8svT8^Yu*T9Pkt@-&Fw%r?n!cof50HG#T%h z`Y;olGZ|OMM@juO1LH|C;RiN@r^S?E~QcwAtmtDC4#Fxb(!2Ed=@dYH?st>%`aVslyC<-8; zv^~o$f5nE$>f8oYm3%ag4ZZRAv2R#udqG}gaIYbv2#=w`Ce>^0r<&y5&eeu{Hwg-@ zeOlBYLub)uaI&})ir6&#Zy!@%ta@jp{rqFr>9WGe{rfF^_ua_3@;bwt6{>!zzRiN8 zoxZe-z6GrzV$mz*06>cDo86z~4^qlo|4xcC^H3*5Yd&kz(ysUpzMro8i4cuJ3&<@a z*T^YEeTP;H2%<-()^_B8y%R&CSb`9wp|Bov^nrG`LzJp(U-}-<+_!L7AznWc#XgcN z)L2X`9Y?pP%x|kC3@i^ej!}=1Ql7Vfc)I5+7+tLdMN>_eUc2ijQmHE+@|v4+Ba2nK 
zgb!sw!Tbh)C-MEwzE9EgqAG{lk>d)}A}R8k^3FdXflZ>(A!jVr=Ij8T#Q>}3967#9 zfXc)LY39X?xJzPc<|oKZU;5mwJT)cYWOUf^1ak`&@Wm*os{?#1L7Nv$%QLGwb%z>Y z?n*vYFER6MOs40_{QA}YyHZWpWnQGQcYT0^s7p!%gx?AxO>b+{Ud?ICK}KiCfA#r= zf}v+wML@B&OS!ZrkUxP$G$LY!5t$wbK!;}F`zNLn#z^HfR@VY>$-iwi?gxttiKA0v z#G_U$^pI9?S0lC8FWmGh-l;tGwvLmFO#Dsx#Zo<`48Cch4!hC4houc$WcHjXwD{rx z8(o}c&6{ujP2B)hTG`(~eA2+}p{52EEITMAgt=Aeg2I>*3*bq|h1=5b7LBa}kntP< z)tWkhgL$OyJsJ5UbjXk904I}VHcf{M>`@hm$S}(P1PFCMLp)) zgpE2?w~n%Kvt%PESgm;dhqa(P5Xt#M<~>vZpdkmyv9uefMha}IUqHhYJukU8?%Av; z`}`bSDz6nE06fDs3|sJHl36VQcx4)XFwfVT5AMO$!k!YG+-H|83M z{~#pUW%#C}l5in}heg0!4b3R@GuU2z441eZ_u5WJJTTtmv!%4;cQ@dSpBChct0w=svWt^Hk-l~uH;1vi+7Ie%M(yDMgDQ=yid4;(zW z*;mFD<#B(<3($fyD5Pyx3=Ii2_a_a@3t6t;9{4^4B2}o;!jDa2XXfV5ZB+@*a4I<} zA;~1!p--)!EF$#;;7O|A;|5YsPXTwJ&h+(1AK*f@DA{?ANI?>+7n`rQLviQQ?*GgXKnM(ggx@dYJ}s(%x!Xj-z$i2lF>X-Ag98z>B{@7Q zD#!eOBeFm+6T%76a@#B*yw|PXUoQaKD-c;-F+ z1u_^qjAE{Y1#0KN`Vv@-EGz+wOdJEQ*WP5%%*VinU=os$mI=Kj7Z}ExBKWi-vp)D?z#0@rf(@^ zxvHT$Me(lb3$T6Lc%$w8q?ir!)&4?y%jR_ryEA9NvsMvyCZX-g!P-o?=$6*j7T}~_ zER|A}S=){97`ISf$iDYd9=z88pXcM#(%HSoL`tVh?>;cXz9{|$N6m_%c{Ah;Fu}|* z!6SCH$S_X*q;n7i0TJtJ9z7!e7d!io1!6X;bMN20`*kPd+)ozQ!^gp0*pId6+7R1p zKl{r+njs+^K!-ffo&ojULux$qGZD9gW0}UzsN))xrC}B#yIgUX8OD*fQtW;vxz#1# zt^;b4)pTicj8%ChQ&}m?u|tE2`^pQ7ZI(O;xmF1Yd4ASCCOWrLY?^I=t%B?J8Y!>H zf651ylWwsa8S2qFn>gy?@qYmlkU-{)-S1tB$|PRz|Gw~RIQ&kMHVnv04~tKg14#2r zDbe+SA3x#&lC8^!%Gp_nmnUvvIJ>*gVK|SwV*i34Y}-(=u6&A1qum4->s7q^jDklS zr|w;xNdoPGrucNGG6jH%Ih|NfqiV)$yWo<@FZINzODUEl)uS*u>9xJgZOK8 zb%vw&HMnujaxucI$z)0IuN?Cxdz7h#MbpOKl?se^1mc)nqn}(HRYB$KW@eCi^iM}2 zsPMnSqADosrayUe856?%%2Lo9eBHRxYDA7sqG(cttr+cgpHrXLQ~)~#XdpwgKkjmj zB?6-g|EjqEw~IT*^Mh!!L5l_n)J~hdvL2ZYJIN9)IKF(a>u@fiG6hVugWz5t_I!v~ z70`a;!KOb3dTdh`ICvizu)-Sj<(YrkSYoETl$cFN(?jL#)t|*&&uzC}&ivR_&G!Rf9oQmLkm#C-%p-3lJ{VKnJr;w zqEiesPKLkoA(eTps$*||u?auMZ;uFjEX1J|lI8eW<@$^6UY9oc>y=-EoeHtzfD{sO zK+{mAv%Gptq*`hA4vF!mlk-|(s8nQze68B!ot@CsQl9xBvuRJ&d<|^ZdphM=A@JPR zx1=I>;6VPLbu&8B!M~XOV@8w%@%TzZ^6{15iNz9QZe4ZQ08bN<7w?{sRl!1XeI;-o 
z>Os(HKVL)Us2KoX9yE2ghS-l~c0GM+tJQ=W7F$oze?U&(i4MEMQV6$CDThh-imWg} z$*lEe`E>bP=Mj&axf=#L6AJ%`?;iPAy&0#(BmEtuqBc?itl47Nd)V6~6=iRhwe&$) zuV#h=Uc2^0O`j5UPGuqQXLX*UbuXPzn2P_6OP8S+`TWTVy_h-45I^Fm8Jx^ArP=`1 zlKVwg?iNQpQ2Uoga1|2%3HWfP14ktXoGPi!8CZ|8nD5%zXwUxs9?rqsxiu=U6q|5u zIffHSc_0K#OXCLe*ZhkGx)+P1n0kV}Q|}*vK`im4)0jJ{*>tz{G>4OqT=SLhR7)YF z((ddP_V=uymFH}#jP#=w5Cnse^OF}SANv~=S~MJC9$^voZ;k%&xXxZuEXRRwq_vmz z1t8N#KF5}sqYTnLWp3Tb+NKaU1%x^Q`*++)`l4a8v>u7KnEA#wCwz=T@B6yW10}%b zz1sy{5zPNez;2{pe*N>C36~!FYh>6|X_0GWhW6{YEEb;8_tpeR+dL&&2h=MY} zi+DZYSOQPf%-MBDjkOZG9RF~36Wj_7n;DPy*Ih)jzFjveVaH`|na$+-qSykhAR!7) zqYM-%F_#IV^+VBEHV}mX+JfS~su3l~@dNVlBdQz88;9*!nq>cR9eLBF%}m#{&_S$* zrpDk6`yf=f%wFaI)M3d*9PPL!L)Lqy1j<*lahbQu9ao^K#J4<;_u`GsjN~GSd)cXGZ@r$>*kVxqTKzoXy z10e-}cFUcvj06X@!8RL)9&+y0grjNwNh&R zu-_q^X&{?tCZf6wwk&!CH|DJ7TVMKefehU6j0VDA$~L~Yh9F7g0YpDQ43J@rw~N;4 zckf6{KzGY(w1muq5Ur9jPOt$!TfsMofn5r+v9&R?gvr6h|9I|X&TDM|XaXZS1Xv^@o}3J_v=&2uYU(>D1s zj30*)6ENceF?8u~TiMq+-VDvIQ;0Or1XCw);VZl|D#nWGv_DJ=rVnA&ZjBtyo_{;8 z7qZ`tY-I&9zYZb;s~F)fTvqyK(%k8YVX*@IznGbuGy(iFTYQw>T0$ZRU=#n{5i`8@ z*u@l|>S0*x&Y*7);5iCO$1%bdojA|VnrpsH)Hg}h?-;X2DEGp<9)|u9IR#1Xjye~y z?PCu`#e^QGQK4j7D7X@f{*}K%zlcV-b-Y^fP5hS9_G?KDE>ay6BoNReD53RrGbybr zeOj(=^{RjjPDgHF$L3y@945eTx=L+v^~#hw1|K*oiK{29DZ)>)_U{J0`o&x*Q4;Cy zpNb@OowIYaE2`L_+}r5>IDMr$&~5eicUc+u?1(%N3O& z_V2&Ohu?QjwW;Lfyv$a)hyE!pZQglRoDz&BaMc#YCzT10ns(H$1O0c)c?eN37J-#Y z0lk7!LWv4+*Mm%R+&u6)X?r`7T@w-Cdc^kr)#dcSUon|Z&*jxjp)8S`*&@2uk9ob? 
zP^8}pLeokz{>D8|8Mf0nTYK&a!-h!J2~)>UC6Z1|1saP>Dcd60S^}^<=J?Vy7KCxL z25~e#ffIv8*uU~wOg;Q$wb0sXYI363ATn|s_5d978K5nIjn@J;9#0TBFf7Bi{+pOc z8))R3kddo~;lq=So5k#578HD%$%@;U$qWZ?>s~oVE*{9!$+?~$v2~C8pd~#P9VUy8 z{S926wZ{9V@WZ446b0Ni!5ecB1v0Q#dV0O%a#p+L$#&%SAIpuoCSAP?k z?umj$Vfq$?e83@DKhexs@A@}uNeQg!kLA7)O$b%ynp{$B?UJ}{w&UIt|MMIbvGwTh zo=s(bh?#25!OL|#cF;NyD;rToLj&3@s7IHv8v;2%xQF_`kzIY|LBDqe3C&Nsj|%Cp z*?kst$9J7_`h<*Du7Obp#AA9pcq$;=F3neJU54(DQ9e9zRdNJ86SUiUOL}T*Qn|Ih zG<}4(S4K?=I|ZN^9UN6}27n|Z{HBNIMW=2DEAV48$w9I z0a4H5ix`Q2`c;oFs7Q68wL#h*`hNL$ra`v=j~!ausP9I)EiI1CAG;S!QFpf^7f1Q< zl?n!_m}!9r0*pLd^(_e*ST1{1?V{jz1Q9M^aR&m~LUrYUgmeJ*01WfLSVF`J*Uxc( zntZD}8^HgRz)h^JbH|QW%#}>H`Jc5F$GDD%@MI|M)k$YP1mz<_s0W)XBgqSG z%DyAWQhRzH;h~S3DA3fxrEEmA8qL^h{5-`$HKN{-#$WSnk5AFWt-U%rjHVkW3~age zAx9FXuQ7R(9WStJK{Rp24jEp9HWyURED(^P+vssW{tlYUTE8gLqd8vry4D@=Ng1TN z?`78^>$oyQ+~Bm~1;2&=TGRzU|Bq3B0{Xg72=|k+ybOt{16v`L`y)jV?XOVWQswmk zg4ULMwyVk16$Yz5fQjCLTrl1mo5-Jl!XG9$=%c_|=mQ{tCVU#5P{>oIh1Cw<*53Rm z!+Yv@6&>>85k17^Uu3hy4m|TQT@VqFgAe}Q6oV!UUDd14{UFP?F5@mPHjQ*XM>m== z96|Fh=wHD4<1&Z};vA+Regbq5qK9<4Qn${UVnF4U_Pe$MK@{G=e8bru1_o{aky6F_ z%Xgq^?EhCau9`tAxeP`FlLZk?viTEN_|=Ynbh^Y~%3l%!2E2BoX+s9n@tpezAAC7= ziQrM`!>7(+F(|Nym|%_q;cr-w+PBS5`&}PobXd6M=+nBdm0yVjF$K^bZ;_=Vd~z2p zcd{-X16NTM9at-(i_sIn2|KN<6fd~>UFFtT)129<;^8v)KHqUE9qQSZBcwXIk^Y6$ ze({T4!oOqQ)33f)=PD*0L$;Y-R-~X)1KYv<#d@B{hcU}`%)4w9wj?Ombb5tcz;uG8 zYJpoekd<~~E1%s9N<`sUC|s@653TeGzZQWnq5oNf`t#3Ty zAjzGY(^|v9;F>`M3kp4eRc~U4Jn0!^?X*x(>BQ^s)y7gkT*s@JN};{w%|ZY8*t}aO90s80uT??ik0a3Fa6;uDZlUPkdNvYDa$YkJ;)MDXU$hl2 z6@iAS97w70;m91d7>R@fx(g^`1X*2?KrYAy!!1Q$04zb?*i$myyKh&udCeO zLFRmwkHY0+GhJgw%5i@vLM?3f`nB`U9yhSl#q3Z)s*VC^BZlbB`v4iPou&f%@w%ln zrn-8OPl_EFRzfdH>(0>gQPh78xqVT2mz1F8C&%GXI@J!Y|4++_3X2{2zX1z|r+`kh z&Bozq%EaX{qH;iv8y@%WDWqk4&-_;l$xGY!500d2SuX|Ql(YRnKSP>KHeYs=9Mx%FXk*h z(?C`NIxysSPTH$FMxPOGCv5p@3>}Ix{5iVlTB9cJG`+(@OAh}gE>STk*5g`@(Ra2o9qyRXWg9Z}@;_&{5RgFwX1-S?lB^d^?9 z-MN^eEg%BXoRIiZUOo_v@m=~Ij>{~7^eY;vZI>W#8)kJdtDl8zSv?OJ;iD8ZVapFV 
z8eNH{#dpnmOFDd-%0D)&J~ty|dnD5hJRKN8XnG8R%efsm*8=}$>p&Z99%2O6kjQD? zemwZcuCYg3HHZb42~XFNRiB z&qk54W6kc`>*VnK z?dInK=E%PZBqye^;UM=i1C8J{l zSlSPHs7ZRfsk)_E3Oc_j`pV0gW{QOPKj+w>Pl6WS#s+g9WI+djGGkZ$s8R5|XepX1 zrEf=Dj#5~ZgF4ogj*n#cv`k-Uz!p)K1)`aM>_?kTGOby=SLcCv>GNOdiq`;f54=S; z#?f6JlAz6FzJ1+=PB1!L!`pv2`|K7SO6Gni33yVss*AVzV^fKp&0J|yS2KOc^aMyJ z%akme;ZyxJley-4s;wP@hAJs-@v{v`*EjdaFR5$n-zYho#=sTVj*N^B!UWIDz}J#AY%!;XDaiP@O+4Zf5&LlpMQ6{OmS9#LIu*{C zG?{*1=%OCc-=ns-b{|x%^PX>?qrj~t{{y5>n+5TK_TVtyx9>7{=s#2n0VwqCqJ#b9 zu$F1A5Av>g?Hf8;^4N4bSwZwNUnO{qo4u*&w3t)7I?cjhrXz6|J{S1$!38~pn68xs zkFMT?Yk@|jFHj7~fB(e*nR`L9>VSR`0p!2_w=T{pcTn9F$JO^Rs85V+l0p$^Rx=rX zF(Gv;Z!wsVQ2SbP9XJgqn4$2ajwR^Ryzchj-FL0!;q;P`T1f|);GQ%D{Qn>K@vR7y z)iDZzdGZ)Y|94j#ulz5zUykkAwVF^5XEM&E{&Y{#OkI%4@LG?}-U+ipvNEDHM^@(& zAw2BRbSs%~TtyVnr&~tx>nVUMAWPgI3R$-84zRQw_{}>l7pVYiP!WCZ;7#vlF zAIi6e1O8OrXw1>HWp;%As}4QJsT4FxUU~yxDmCG0j=o!F&phx?fc*rRJ{YLOvp)k_ zdy%CKjoTgQts4U#?uW$;gX^cA5-11BSl{C?UBg(RO9_BvCvKT_jiDjcA89?wNC4Y^ zT=T0t^4hM8R2at{-9JSIbQk&=F)S5o%v9^XOuWtf`hg{i5Pff2x6 z+%1-zW(=8KmCBcJclUmHx_H(ms6`QJzwzahK#le}n%2opugMON)ZH6Z$4u0hr+>^0Hztc=1G6XexZu+#~O5bk-cE2cjk&o<@i zHfIX-oQ5gizLliD9j>`41}LB6KQX~s6x>j!FtrfG4`ttI(S2X;yQM)&|FgQO>KLSZ z-Z=Lehf&;`b!eomq=us6pTla=&H92RVsfbN4IU4MT4~bPo%HrQ+o_$39;)x;-yxse z!L@^dyyKrRDt`j?+ayQ;P~SI2h$)!gc?+C$&<+K*%!d`uXGXZSiZVQpj>7K`0G73&l;R>smR}fec2yY_BaPg0Y!^Vr*6I{U7II^D&1XFX8-sa(lHu1cZD5gu_)3d z;#<)Lyq2oCIDKvM6%tMit^;iVN7VU`3a?PkhPw!;a=3IT-Q_+_Ka14hGirM(d#+IQ z;QWRc^nOt0g7zNp9Dron%^W`YM5*%(UrK9HOL!VoqPi`oY)QdQ(X&m#rTrnaa0h$arAOr8F zD4hQUvlfOw3mTBhTam?1KK5m;)wUXpt;S|^8xIn$S<)O9)qJmsr7HS(CSsgdYIwVD zqZ+DE6cc0Xb1Rhi?LP8ULux66Mat1w#B_oRnBg8ioHL|6;8UN&{vj5xrd9w}Ksr}@ zuVv$NK>=q+6#!2vL3cqy2=T61gr}2HH{lzPm!Q+>C1n@R}4^1QLGnd zqHcbHz2^k>2Qjf2Jy=()MRD1zFB-4cvRjc)e5_VN<|4b-ql7=jaOcnv$k(6ITiMu} z-fBXQ;lTmfYjdk1BGK3J!FiM22QF*!E&CIMoz)q`+|<8)IbB)Al%Fyx8%%;F<;M=Q zy}R!BZa!Vcw4E*InSqTz6mg8X+Z=%j0ZqdNPT4guskS=em2-I6zbp<($s749>PdA0 z`hSt`SOfqnB#SQK=BL1y@gWG6sm+4Az#qUKf^dX}E34xQhwHvr`C(nYHh8y5XM0=R 
zV*&cuTYhbuHq1wtO?`WTeatRlU`SyF|y}a zB}4?u^@5J4UTSPH`-1?^)~*6gnJ%4tZgN;50SNy=xa-l;h;5~}K7ljO%#) zP>hwx{0k6}x6je}F&bc-2HvD{Haah%FDxKT%eE5kRYp}NL3Tw3=1rc|0w8j82wlCo zA#^Bn2#}zM;DkN@wf&W5!=0I}mv4S%qJRg}4f!-1Oc%rmCF@VvI z{}FMvyN$=>Z8WB2`g;&Gg9#XYs+IX`tWZUnr#uR$Tbh4*Md^uG;?{t&>E&&~${s`S zG;9-9eH_Ek+!ep-^8;V6IIeh=4ZAzc!gN|r1!l}c!_ z$4Fu<8RcdvjF2I@){q)Tb;Vc6D3iU#)n?yAD3iD%OPP?}jR_$mvdnzXXXblfzwQ6u zUw8hfmzmEz&w0){&w0*s&ikBs$VzSgY=1BX3cdQ<%J%G$^RU`Cbf_Y$5s5W`L_~FR z(hFxG#=Qhb6}X5Jz~A+LIijU6oGb{z>~Fkp9{8 z-sZIzK5l2*3_n2!2T5{#jT+g*+~~{~EuFiSPKL#OFIM<)>}f@VP-dukLCTcn3n8rw zajZ=TL>i61R93SixDeEVF!H)+5^d5b25->K2 z3so1}{KWY6&9%OYB5>U=hO(u47qwh}U!+kppcz#E4|xIr`u!R+vXnyT_}xnr2mjiQ znF0s{v;uL3Mgd{jIRXbD-{xoJRThx}`TO@L4XN&6E^eYr`ty$|Y${xWYOn5#PsZKk zQ(i$PhYvzKId5-i`ju55Mz!D<4*=K)suJEHwNniWdi#EdP}u66ZwvF6iQf!weHgXO zJ2@yWE^dY#UV{iI5$!k{ynoIXI+Y={z;vB){{ha`q^(;c&O<5Pqv7L?^=RQ7E7J0b z^X?m^MYM;bnN0S9r<1?lbvY|@g^}wqI!`G1>n;uHxaGPI%?FUO5uA}yjiOZ;2ZhA* z1T7(%w)6_}1M?*ns-b09y6kl%-qaJ2cx61AIu@vraY!e7Tj=7;mseNb&cB#T-Q%e1 z8&n$k+)KJYHT<~Xr8CKKUeZt(WNezYy3ld=t3Kvz1^BTSp zqL-^kuvzOxLWgz;TBg&bvQ|YFWV(yIn_a&*#_zr|p&pg~?XzB~(GC&FCn~KcJVkl- zoNZ~_8SB9Lu=(&sM|piKb6d>^hJ1bH&Cqmb>Oq57uT+*rOw%M9Y3!BsFWiT`f$Me* zNSin2r8%q^pLYCe=Jy6p>%Cu(Qm-0J1fv3;H0FqgbJ|aZFFf9PSa!lsx)QzPc}tAW z>RUJ65D`VK^+=F4ZRUy0Ws$Xfu;+E$a~eYcagggxDK}*IOL>_{f1Pj{Uc(6}csE%i zeIrT?BD4Ti@C_3$kg@f`YnuzAqR_@9dU?V7@=P7agPA1{>yZ>JKi_etf(E2zP~J?w zNz0U5yL`l%20f!7?~>=KjY%2kx^$Y3@6rWWw9mpQG>e+NRTq+}Q53CFY!D?wM*D$DlgXTA3}|nMB_^ zq{=;(m_o3OpY@C2wPY$P?Z05F;`Z00i1q2}^z!M6&X;gixOY?Y*XfJ&UV8lT4G!#C zqP@+6FCgqPjvk{aAPOf`T6()%EkS%dGov&YsP{m4vijuuRydx8=1+ z!9lkmW}o;*OT{?*kc*^-l8-g>Lo+=9>d08pvutlKJf{yKY(B{YBvoj`bV-lh{6StQ ztdTf}YJ0ijJr*a+@DV=2*rO4wy&VO4J+uVp2MzYf;jl3}xgO$|J>S#8)Y^LYxqZolf!2eYTeaXAORjRd0dYgZ-n<{{Bhx4z5dTe6 zjeIqU`&nBORKSScDlAu;BD>MTNLF^rq3G6K&ZSj6o$317?QnRoAlLyUrLTz!|&rZ7(|9KyG5fyFELy!6xc=Dym?m~9Th2Dk-aV81r zg%(g|V}oqlDA=|ShtHAmXRkyv20kIl&P#Go<=tDe+^Fe%)#tf)+0OfRky%_~qh;Vs 
zm^7Q+lam;ZZM_ma|HU_4bW?`y=4maAoId8FFKPU2ti3JURI`p{RPje(g7WJ8c`03(KW&6_S{0Nc z6ab7U{>`-buQJ-@wcW$DWu_Av0oX)70H1`||ClA-^S6=eITjW0jmDvYx%>2D?1Xf8 z!Q^*=>OMQ7$1Xsf-Ku5@2bjitv_CyG_XjB`A0ODRoZObE^kAZsKA9PIt%9?pm+LZA zIZ1m|wY?Hhh*-ABS>M!bBw{1%R|{p5bAZkI{x-|m#3L#Jx@|V#o((hw^+H-;<9^4jq57O7_E)ve&D0#aqdh$uauA|q z9h-K28fg4hL9?zqCkq?UbC6+Z@=8IZ!F}k1SGK$RDRsA9a#?Cs)lI@0ZTZ!NJm!(? zqtNPUfIT|%X|}kjY%=sll4vnDI=O4o0oz5 zGApH7p4ZJh0jj(ej3op0<~IgLEiqFOGWPDr-w;!r!(!({m5tYy%QE`X+uNo+D~m^F z#JXxoVoA6le&*Ur(nzZ-T*M&(BIfrxp~+m=NvG?4`dcO3Oz-YLCaHCNu*kTIaE?r` z^oJT;n<`2UY+8)kf`S?RzIW|nA3luMr2MCpjbLZD+}I4qp_QE@8WL-+!c~G{o)Wan z8lJk^8@@OrW&;76C@+k#K$8q^IGffGcVizE%-hc|Mcm^wtUwSJwQnLfb+fR*L#1Qj zhe0m%1#{<>Zxxs4E9zW0@zWh1&k_Zn2WzXuQNE(V`E4`vO&3`#C{(O*csF^Te0%3yP12@N%JHLN#lF^HN zKzirVehYpkVtllv>Bz{ByoCiyYdM1mWwV*kj?Xi%AlB6=OLw zG!cVK62}O*q&Yk1H_MMRqbL3O_qPh1N!2Oz9zVeNMR~hF<97<0Dv%?GjhQj3*szJY z^IH9GxUakF+1kQpEn1Vd9|;zpb3dEhHQrF}Fq<^fRa}u=`Z@gGz(AmyPPYwt>_%&z z^4=ps%pGQuz@0&GE)+T~R-b_|QnHD9BK|u`>y_yB~!YTBt`Dk)$N4@cwdRs^} zAM%wLpL_^XurS2jy=ppK?2RJ%lNLq)w{0DdK1?1HC1e$0?xCsloFE&4OlWLy`5jkh zmx~d=)9m?*$xZMZ8qbyYewckI0MWOijPYQUqUr{|tV>*nQ(*3wS%82=SLWi>KZ3n8 z*m4{D1j3ycU8;c0$|*YtS$cEh$XId6@A;(5hza!t^qBS&Dtg-EueKpc{E81iHQkS29-u%4|fpYZ`6r$odOzmA8417G|NKIV23VTyvKK zHY}xbhYkXm<0l(<%qkEb+a?g{%X4QA!cEJL zzn~iC*&qma5>e={7qcH`0}KlGQ5_Ua=R%GLe0^-TGnSeQb=t4sU{^;XWR!4NZ)XF# zo(EOJN&(44v~zH^sQ9*t#-M9(dkXcaa-u|Uko&LG(roH1DCEdBn|@V8R}0`!N7}E{ z6PuPYdE6-hMtXa0iNVT~f0(NSw-W}ksTbusCg9$b=sXO)c@#Pu8X;}#0WDMMaEGBE z>IV*o-Cnoa&ebFVI3~iup9G~<;3u=qm0=7#9590F-P5|pVb-TXsM1{aBtC(*s}#Ig z+E-8wJ$S6eJ$;J})?t+n142o0+FqADAc_U77cxefxpt)4P@AuTgtWK{W~>X#^%)`| zJh;x1Qi%k%sDO)Rn7y7kVgcU|S>9)j=$Jly84bL!-^HD-M`RV;BbZ>!8up>>5DL-x z9ml}GLweh~9)p>5-XKeA;($%R!J8|7BE%3`Us#hdB1Z^gDZst-WxY1?Wt30-b*`CH zG_?h*BY5Onuq@1~^mFqK;Lic+Szr}jY{`y`kwxM}O)JY8$S&O!r(m`T7XCIAu{1kU zyC6ByskIat2cvcJAEx4t!kV@V;x4O^P=Qr5D3e&$^eDC^RRoXx$18m-`9YMn%|__iIRQ<)X@#FLecb|L0~ZO&I-S^9r3>WHG8MgV`C6L z;{%PnOo<9IQ}pI%LpLdem{rh)*7_vwY*ptn_+TGJCBm>65&`b8hk-3f(5x^5bQGV? 
zJmi1|zDfeP7XH%z!duU&%%fcwu|$nv;jEW!7TN28S^R5ol4Mf7R}pVf>3!%bIz z2cjF9K=!){Ps@^W!di6%X!U=N4{SMw!m zhrx;v?HB{~T15N4aJq?4&jyA60_3J||IZlD;(%391ZD(Jx_*%Y@x>^SRVPPb8o`(1 zJ5RnG;*yWCd5^4SyI1N7dF~8B$PAY|xHDjLBvAW7(wN=Lj@@tfC@@DsrB+Y|pi(hO zc04{KxNmtri7JE$Wq4xCZ5BCfv-!hbYY5i%an%_6#TjB+iP{~c5(6ug*&uBO5&8Ej z5Eq<-j-CQB*u7*;gftcz%%UOIY5*}*fJKPaLLozp7-qFDYp=tSv$}hejmtjqi2DJu zi>uc~h~fYH>oBvTFuH}u{gE#K`1x-it^vhZ(QYtUP%3~3mfm`h`v8obj;7YCy+tH5 z%a>>T7Q&SBiP_0-upb2mq)?Ya&A1J~8hgpF_!>Y2h;c6K{iHLSQ?IpN9d{ zktGumHqq@Zz$iKM)TOcyd_6_~`|n9)I45ffzJ_Lp^?1SvyV3TO8qNBdD`ta`yiqyE zqk>0rLc0zTlW35&a84M%T)PEXc9Ar1p>iiD|Ic^;{#1bdp>H&P)y#PLYWGbL{t8R* zj&aF*F7vZZr{IuC)NUUyF|Y)Yjw1q=^bX*PjVZ1>ZlT!u_ecKRkKkXB=>PoaF(-=Z zsr$c4@0!i3ohdLBY?GNA!ZnJ^%}nF8T$g32wY$c+D%+`a4XDd6X^6;uw953x0eo-p zHcJ_JL89?@KfMgE5um9^zI2n~7RJ8cYkZ^FR}S#RFE(OCU|9{z_L|LDd)JLUh& b6tLw#_;o*!?o}G%gO}NHE8~14muvq9>AMf( literal 0 HcmV?d00001 diff --git a/docs/src/assets/logo.svg b/docs/src/assets/logo.svg deleted file mode 100644 index ab26f7830..000000000 --- a/docs/src/assets/logo.svg +++ /dev/null @@ -1 +0,0 @@ -SymbolicRegression.jl diff --git a/docs/src/components/AuthorBadge.vue b/docs/src/components/AuthorBadge.vue new file mode 100644 index 000000000..2679c013a --- /dev/null +++ b/docs/src/components/AuthorBadge.vue @@ -0,0 +1,139 @@ + + + + + diff --git a/docs/src/components/Authors.vue b/docs/src/components/Authors.vue new file mode 100644 index 000000000..2ce9e19bb --- /dev/null +++ b/docs/src/components/Authors.vue @@ -0,0 +1,28 @@ + + + + + diff --git a/docs/src/components/VersionPicker.vue b/docs/src/components/VersionPicker.vue new file mode 100644 index 000000000..17514e206 --- /dev/null +++ b/docs/src/components/VersionPicker.vue @@ -0,0 +1,130 @@ + + + + + + + diff --git a/docs/src/examples.md b/docs/src/examples.md index 79f41ba53..e0cab068b 100644 --- a/docs/src/examples.md +++ b/docs/src/examples.md @@ -60,7 +60,7 @@ println(r.equations[r.best_idx]) Here, we do the same thing, but with multiple expressions at once, 
each requiring a different feature. This means that we need to use -`MultitargetSRRegressor` instead of `SRRegressor`: +[`MultitargetSRRegressor`](@ref) instead of [`SRRegressor`](@ref): ```julia X = 2rand(1000, 5) .+ 0.1 @@ -85,7 +85,7 @@ end ## 4. Plotting an expression For now, let's consider the expressions for output 1 from the previous example: -We can get a SymbolicUtils version with: +We can get a SymbolicUtils version with [`node_to_symbolic`](@ref): ```julia using SymbolicUtils @@ -467,7 +467,79 @@ println("Learned expression: ", best_expr) If successful, the result should simplify to something like $\frac{\sqrt{x^2 - 1}}{x}$, which is the integral of the target function. -## 11. Additional features +## 11. Seeding search with initial guesses + +You can also provide initial guesses for the search. +In this example, let's look for the following function: + +```math +\sin(x_1 x_2 + 0.1) + \cos(x_3) x_4 + \frac{x_5}{x_6^2 + 1} +``` + +```julia +using SymbolicRegression, MLJ + +X = randn(Float32, 6, 2048) +y = @. sin(X[1, :] * X[2, :] + 0.1f0) + cos(X[3, :]) * X[4, :] + X[5, :] / (X[6, :] * X[6, :] + 1) +``` + +This expression is quite complex. Now, say that we know most of +the structure, but want to further optimize it. We can provide +a guess for the search: + +```julia +model = SRRegressor( + binary_operators=[+, -, *, /], + unary_operators=[sin, cos], + maxsize=35, + niterations=35, + guesses=["sin(x1 * x2) + cos(x3) * x4 + x5 / (x6 * x6 + 0.9)", #= can provide additional guesses here =#], + batching=true, + batch_size=32, +) + +mach = machine(model, X', y) +fit!(mach) +``` + +If everything goes well, it should optimize the `0.9` to `1.0`, +and also discover the `+ 0.1` term inside the sinusoid, whereas +this might have been difficult to discover as fast from the normal search. + +You can also provide multiple guesses. For a template expression, +your guesses should be an array of named tuples, such as +`(; f="cos(#1) + 0.1", g="sin(#2) - 0.9")`. 
+ +## 12. Higher-arity operators + +You can use operators with more than 2 arguments by passing an `OperatorEnum` explicitly. +This operator allows you to declare arbitrary arities by passing them in a `arity => (op1, op2, ...)` format. + +Here's an example using a ternary conditional operator: + +```julia +using SymbolicRegression, MLJ + +scalar_ifelse(a, b, c) = a > 0 ? b : c + +X = randn(3, 100) +y = [X[1, i] > 0 ? 2*X[2, i] : X[3, i] for i in 1:100] + +model = SRRegressor( + operators=OperatorEnum( + 1 => (), + 2 => (+, -, *, /), + 3 => (scalar_ifelse,) + ), + niterations=35, +) +mach = machine(model, X', y) +fit!(mach) +``` + +This sort of piecewise logic might be difficult to express with only binary operators. + +## 13. Additional features For the many other features available in SymbolicRegression.jl, check out the API page for `Options`. You might also find it useful diff --git a/docs/src/slurm.md b/docs/src/slurm.md new file mode 100644 index 000000000..c329a36f0 --- /dev/null +++ b/docs/src/slurm.md @@ -0,0 +1,175 @@ +# Using SymbolicRegression.jl on a Cluster + +Here, we look at how to run SymbolicRegression.jl efficiently on an HPC cluster managed by Slurm. +Both methods utilize [`SlurmClusterManager.jl`](https://github.com/JuliaParallel/SlurmClusterManager.jl), +which extends Distributed.jl (part of the Julia standard library) to spread out +worker processes across a slurm allocation. + +For other cluster managers, see the [`ClusterManagers.jl`](https://github.com/JuliaParallel/ClusterManagers.jl) documentation. + +## Example Script + +Here is the example script we would like to run using our slurm cluster. +We will pass the `addprocs_function` to the `SRRegressor` constructor, +which will be used to add worker processes to the cluster. + +```julia +# script.jl + +using SymbolicRegression + +using Distributed: addprocs +using SlurmClusterManager: SlurmManager +using MLJ: machine, fit! 
+ +# Figure out how large a job we are launching +num_tasks = parse(Int, ENV["SLURM_NTASKS"]) + +# Define a custom loss function (this will +# be automatically passed to the workers) +my_loss(pred, targ) = abs2(pred - targ) + +# Create a simple dataset +X = (; x1=randn(10_000), x2=20rand(10_000)) +y = 2 .* cos.(X.x2) .+ X.x1 .^ 2 .- 1 + +add_slurm(_; kws...) = addprocs(SlurmManager(); kws...) + +model = SRRegressor(; + unary_operators=(), + binary_operators=(+, *, -, /, mod), + niterations=5000, + elementwise_loss=my_loss, + ###################################### + #= KEY OPTIONS FOR DISTRIBUTED MODE =# + parallelism=:multiprocessing, + addprocs_function=add_slurm, + #numprocs=num_tasks, # <- Not relevant for `SlurmManager`, but some other cluster managers need it + ###################################### + # Scaled # of populations means workers can stay busy: + populations=num_tasks * 3, + # Larger ncycles_per_iteration means reduced + # communication overhead with head worker node: + ncycles_per_iteration=300, + # If you need additional modules on workers, pass them to `worker_imports` as symbols: + #worker_imports=[:VectorizationBase], +) + +mach = machine(model, X, y) +fit!(mach) +``` + +Save this as `script.jl`. + +This script should be launched once within the slurm allocation; +`SlurmManager` will then automatically add worker processes via +Julia's Distributed.jl. + +## Interactive Mode, using `salloc` + +Now, let's see how to launch this. The first technique +is interactive - this is useful at the prototyping stage, +as you can quickly re-run the script within the same allocation. + +First, request resources: + +```bash +salloc -p YOUR_PARTITION -N 2 +``` + +with whatever other settings you need (follow your institution's documentation). 
+ +Then, view the allocated nodes: + +```bash +squeue -u $USER +``` + +You should then usually be able to directly connect +to one of the allocated nodes: + +```bash +ssh YOUR_NODE_NAME +``` + +On this node, you can now run the script. +When you do this, because you are running interactively, +you will need to declare the number of tasks. Do this +with `SLURM_NTASKS`: + +```bash +SLURM_NTASKS=127 julia --project=. script.jl +``` + +`SLURM_NTASKS` specifies how many worker processes to spawn. +You might also want to declare `JULIA_NUM_THREADS=1` here, +to not overuse resources. + +## Batch Mode, using `sbatch` + +The more common technique is to create a batch script, +which you can then submit to the cluster. + +Let's say we want to create a batch script +on partition `gen`, and allocate 128 SymbolicRegression workers. +For each worker, we'll assign 1 CPU core. +Let's also say that we'll run this job for up to 1 day. + +Create a batch script, `batch.sh`: + +```bash +#!/usr/bin/env bash +#SBATCH -p gen +#SBATCH -n 128 +#SBATCH --cpus-per-task=1 +#SBATCH --time=1-00:00:00 +#SBATCH --job-name=sr_example + +julia --project=. script.jl +``` + +We can simply submit it: + +```bash +sbatch batch.sh +``` + +Slurm will then allocate the necessary nodes automatically. + +## Explicit Worker Management + +For more explicit control over the worker processes, +including the ability to specify multiple functions that +need to be defined on each worker (normally only one level +of functions are passed to each worker), you can pass +a list of processes directly to the `SRRegressor` constructor. + +```julia +using SymbolicRegression + +using Distributed: addprocs, @everywhere +using SlurmClusterManager: SlurmManager +using MLJ: machine, fit! 
+ +num_tasks = parse(Int, ENV["SLURM_NTASKS"]) + +procs = addprocs(SlurmManager()) + +@everywhere begin + # define functions you need on workers + function my_loss(pred, targ) + abs2(pred - targ) + end + + # define any other modules you need on workers + using VectorizationBase +end + +model = SRRegressor(; + #= same as before =# + #= ... =# + + procs=procs, # Pass workers explicitly + addprocs_function=nothing, # <- Only used for SR-managed workers +) +``` diff --git a/docs/src/types.md b/docs/src/types.md index 8a6d78f90..841c3f821 100644 --- a/docs/src/types.md +++ b/docs/src/types.md @@ -9,7 +9,7 @@ as follows. Node ``` -When you create an `Options` object, the operators +When you create an [`Options`](@ref) object, the operators passed are also re-defined for `Node` types. This allows you use, e.g., `t=Node(; feature=1) * 3f0` to create a tree, so long as `*` was specified as a binary operator. This works automatically for @@ -41,7 +41,7 @@ copy_node(tree::Node) ## Expressions -Expressions are represented using the `Expression` type, which combines the raw `Node` type with an `OperatorEnum`. +Expressions are represented using the [`Expression`](@ref) type, which combines the raw [`Node`](@ref) type with an `OperatorEnum`. ```@docs Expression diff --git a/docs/utils.jl b/docs/utils.jl index bcb9b3519..512f6862e 100644 --- a/docs/utils.jl +++ b/docs/utils.jl @@ -32,12 +32,14 @@ function process_block_content(indent, block_content) return "" end indent_length = length(indent) + # Filter out formatter directive lines lines = split(block_content, '\n') + lines = filter(line -> !startswith(strip(line), "#! 
format:"), lines) stripped_lines = [ - if length(line) > indent_length - line[(indent_length + 1):end] + if startswith(line, indent) + line[(length(indent) + 1):end] # Only remove exactly the indent prefix else - "" + line # Keep the line as is if it doesn't have the expected indent end for line in lines ] return strip(join(stripped_lines, '\n')) diff --git a/examples/custom_types.jl b/examples/custom_types.jl new file mode 100644 index 000000000..6873395cb --- /dev/null +++ b/examples/custom_types.jl @@ -0,0 +1,264 @@ +#! format: off + +#literate_begin file="src/examples/custom_types.md" +#= +# Working with Custom Input Types + +Say that you have some custom input type you want to evolve an expression for. +It doesn't even need to be a numerical type. It could be anything --- even a string! + +Let's actually try this. Let's evolve an _expression over strings_. + +First, we mock up a dataset. Say that we wish to find the expression + +```math +y = \text{zip}( + \text{concat}(x_1, \text{concat}(\text{``abc''}, x_2)), + \text{concat}( + \text{concat}(\text{tail}(x_3), \text{reverse}(x_4)), + \text{``xyz''} + ) +) +``` + +We will define some unary and binary operators on strings: +=# + +using SymbolicRegression +using DynamicExpressions: GenericOperatorEnum +using MLJBase: machine, fit!, report, MLJBase +using Random + +"""Returns the first half of the string.""" +head(s::String) = length(s) == 0 ? "" : join(collect(s)[1:max(1, div(length(s), 2))]) + +"""Returns the second half of the string.""" +tail(s::String) = length(s) == 0 ? 
"" : join(collect(s)[max(1, div(length(s), 2) + 1):end]) + +"""Concatenates two strings.""" +concat(a::String, b::String) = a * b + +"""Interleaves characters from two strings.""" +function zip(a::String, b::String) + total_length = length(a) + length(b) + result = Vector{Char}(undef, total_length) + i_a = firstindex(a) + i_b = firstindex(b) + i = firstindex(result) + while i <= total_length + if i_a <= lastindex(a) + result[i] = a[i_a] + i += 1 + i_a = nextind(a, i_a) + end + if i_b <= lastindex(b) + result[i] = b[i_b] + i += 1 + i_b = nextind(b, i_b) + end + end + return join(result) +end + +#= +Now, let's use these operators to create a dataset. +=# + +function single_instance(rng=Random.default_rng()) + x_1 = join(rand(rng, 'a':'z', rand(rng, 1:10))) + x_2 = join(rand(rng, 'a':'z', rand(rng, 1:10))) + x_3 = join(rand(rng, 'a':'z', rand(rng, 1:10))) + x_4 = join(rand(rng, 'a':'z', rand(rng, 1:10))) + + ## True formula: + y = zip(x_1 * "abc" * x_2, tail(x_3) * reverse(x_4) * "xyz") + return (; X=(; x_1, x_2, x_3, x_4), y) +end + +dataset = let rng = Random.MersenneTwister(0) + [single_instance(rng) for _ in 1:128] +end + +#= +We'll get them in the right format for MLJ: +=# + +X = [d.X for d in dataset] +y = [d.y for d in dataset]; + +#= +To actually get this working with SymbolicRegression, +there are some key functions we will need to overload. + +First, we say that a single string is one "scalar" constant: +=# + +import DynamicExpressions: count_scalar_constants +count_scalar_constants(::String) = 1 + +#= +Next, we define an initializer (which is normally 0.0 for numeric types). +=# + +import SymbolicRegression: init_value +init_value(::Type{String}) = "" + +#= +Next, we define a random sampler. This is only used for +generating initial random leafs; the `mutate_value` function +is used for mutating them and moving around in the search space. 
+=# + +using Random: AbstractRNG +import SymbolicRegression: sample_value +sample_value(rng::AbstractRNG, ::Type{String}, _) = join(rand(rng, 'a':'z') for _ in 1:rand(rng, 0:5)) + +#= +We also define a pretty printer for strings, +so it is easier to tell apart variables and operators +from string constants. +=# + +import SymbolicRegression.InterfaceDynamicExpressionsModule: string_constant +function string_constant(val::String, ::Val{precision}, _) where {precision} + val = replace(val, "\"" => "\\\"", "\\" => "\\\\") + return '"' * val * '"' +end + +#= +We also disable constant optimization for strings, +since it is not really defined. If you have a type that you +do want to optimize, you should follow the `DynamicExpressions` +value interface and define the `get_scalar_constants` and `set_scalar_constants!` +functions. +=# + +import SymbolicRegression.ConstantOptimizationModule: can_optimize +can_optimize(::Type{String}, _) = false + +#= +Finally, the most complicated overload for `String` is `mutate_value`, +which we need to define so that any constant value can be iteratively mutated +into any other constant value. + +We also typically want this to depend on the temperature --- lower temperatures +mean a smaller rate of change. You can use temperature as you see fit, or ignore it. 
+=# + +using SymbolicRegression.UtilsModule: poisson_sample + +import SymbolicRegression: mutate_value + +sample_alphabet(rng::AbstractRNG) = rand(rng, 'a':'z') + +function mutate_value(rng::AbstractRNG, val::String, T, options) + max_length = 10 + lambda_max = 5.0 + λ = max(nextfloat(0.0), lambda_max * clamp(float(T), 0, 1)) + n_edits = clamp(poisson_sample(rng, λ), 0, 10) + chars = collect(val) + ops = rand(rng, (:insert, :delete, :replace, :swap), n_edits) + for op in ops + if op == :insert + insert!(chars, rand(rng, 0:length(chars)) + 1, sample_alphabet(rng)) + elseif op == :delete && !isempty(chars) + deleteat!(chars, rand(rng, eachindex(chars))) + elseif op == :replace + if isempty(chars) + push!(chars, sample_alphabet(rng)) + else + chars[rand(rng, eachindex(chars))] = sample_alphabet(rng) + end + elseif op == :swap && length(chars) >= 2 + i = rand(rng, 1:(length(chars) - 1)) + chars[i], chars[i + 1] = chars[i + 1], chars[i] + end + if length(chars) > max_length + chars = chars[1:max_length] + end + end + return String(chars[1:min(end, max_length)]) +end + +#= +This concludes the custom type interface. Now let's actually use it! + +For the loss function, we will use Levenshtein edit distance. +This lets the evolutionary algorithm gradually change the strings +into the desired output. +=# + +function edit_distance(a::String, b::String)::Float64 + a, b = length(a) >= length(b) ? (a, b) : (b, a) ## Want shorter string to be b + a, b = collect(a), collect(b) ## Convert to vectors for uniform indexing + m, n = length(a), length(b) + + m == 0 && return n + n == 0 && return m + a == b && return 0 + + ## Initialize the previous row (distances from empty string to b[1:j]) + prev = collect(0:n) + curr = similar(prev) + + for i_a in 1:m + curr[1] = i_a + ai = a[i_a] + for i_b in 1:n + cost = ai == b[i_b] ? 
0 : 1 + curr[i_b + 1] = min(prev[i_b + 1] + 1, curr[i_b] + 1, prev[i_b] + cost) + end + prev, curr = curr, prev + end + + return Float64(prev[n + 1]) ## Make sure to convert to your `loss_type`! +end + +#= +Next, let's create our regressor object. We pass `binary_operators` +and `unary_operators` as normal, but now we also pass `GenericOperatorEnum`, +because we are dealing with non-numeric types. + +We also need to manually define the `loss_type`, since it's not inferrable from +`loss_type`. +=# +binary_operators = (concat, zip) +unary_operators = (head, tail, reverse) +hparams = (; + batching=true, + batch_size=32, + maxsize=20, + parsimony=0.1, + adaptive_parsimony_scaling=20.0, + mutation_weights=MutationWeights(; mutate_constant=1.0), + early_stop_condition=(l, c) -> l < 1.0 && c <= 15, # src +) +model = SRRegressor(; + binary_operators, + unary_operators, + operator_enum_constructor=GenericOperatorEnum, + elementwise_loss=edit_distance, + loss_type=Float64, + hparams..., +); + +mach = machine(model, X, y; scitype_check_level=0) + +#= +At this point, you would run `fit!(mach)` as usual. +Ignore the MLJ warnings about `scitype`s. +```julia +fit!(mach) +``` +=# + +#literate_end + +using Test + +fit!(mach) + +ŷ = report(mach).equations[end](MLJBase.matrix(X; transpose=true)) +mean_loss = sum(map(edit_distance, y, ŷ)) / length(y) +@test mean_loss <= 8.0 +#! 
format: on diff --git a/examples/mooncake/Project.toml b/examples/mooncake/Project.toml new file mode 100644 index 000000000..375e59f66 --- /dev/null +++ b/examples/mooncake/Project.toml @@ -0,0 +1,9 @@ +[deps] +DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63" +DynamicExpressions = "a40a106e-89c9-4ca8-8020-a735e8728b6b" +MLJBase = "a7f614a8-145f-11e9-1d2a-a57a1082229d" +Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6" +SymbolicRegression = "8254be44-1295-4e6a-a16d-46603ac705cb" + +[sources] +SymbolicRegression = {path = "../../"} diff --git a/examples/mooncake/example.jl b/examples/mooncake/example.jl new file mode 100644 index 000000000..661704042 --- /dev/null +++ b/examples/mooncake/example.jl @@ -0,0 +1,17 @@ +using SymbolicRegression, Mooncake, MLJBase, DifferentiationInterface + +X = (; x=randn(100), y=randn(100), z=randn(100), w=randn(100)) +y = @. 2 * cos(X.x) + X.y^2 - 4 * X.z + 3 * X.w + +expression_spec = @template_spec(expressions = (f, g), parameters = (p=1,)) do x, y, z, w + return f(x, y) + g(z) + p[1] * w +end + +model = SRRegressor(; + binary_operators=(+, *, /, -), + unary_operators=(cos, exp), + autodiff_backend=AutoMooncake(; config=nothing), + expression_spec=expression_spec, +) +mach = machine(model, X, y) +fit!(mach) diff --git a/examples/parameterized_function.jl b/examples/parameterized_function.jl index 1c954d5d2..513e7750c 100644 --- a/examples/parameterized_function.jl +++ b/examples/parameterized_function.jl @@ -25,15 +25,12 @@ We will need to simultaneously learn the symbolic expression and per-class param =# using SymbolicRegression using Random: MersenneTwister -using Zygote +using Zygote #src using MLJBase: machine, fit!, predict, report using Test #= Now, we generate synthetic data, with these 2 different classes. 
- -Note that the `class` feature is given special treatment for the [`SRRegressor`](@ref) -as a categorical variable: =# X = let rng = MersenneTwister(0), n = 30 @@ -51,20 +48,37 @@ end #= ## Setting up the Search -We'll configure the symbolic regression search to: -- Use parameterized expressions with up to 2 parameters -- Use Zygote.jl for automatic differentiation during parameter optimization (important when using parametric expressions, as it is higher dimensional) +We'll configure the symbolic regression search to +use template expressions with parameters that _vary by class_ =# stop_at = Ref(1e-4) #src +# Get number of categories from the data +n_categories = length(unique(X.class)) + +# Create a template expression specification with 2 parameters +expression_spec = @template_spec( + expressions = (f,), parameters = (p1=n_categories, p2=n_categories), +) do x1, x2, class + f(x1, x2, p1[class], p2[class]) +end +test_kwargs = if get(ENV, "SYMBOLIC_REGRESSION_IS_TESTING", "false") == "true" #src + (; #src + expression_spec=ParametricExpressionSpec(; max_parameters=2), #src + autodiff_backend=:Zygote, #src + ) #src +else #src + NamedTuple() #src +end #src + model = SRRegressor(; niterations=100, binary_operators=[+, *, /, -], unary_operators=[cos, exp], populations=30, - expression_spec=ParametricExpressionSpec(; max_parameters=2), - autodiff_backend=:Zygote, + expression_spec=expression_spec, + test_kwargs..., #src early_stop_condition=(loss, _) -> loss < stop_at[], #src ); diff --git a/examples/template_expression_complex.jl b/examples/template_expression_complex.jl index 372ac1c3a..c1fda78f1 100644 --- a/examples/template_expression_complex.jl +++ b/examples/template_expression_complex.jl @@ -250,7 +250,9 @@ ex = TemplateExpression( So we can see that it prints the expression as we've defined it. Now, we can create a regressor that builds template expressions -which follow this structure! 
+which follow this structure, by defining a `TemplateExpressionSpec` +which wraps the `structure` object. This will result in generating +expressions like the above `ex` object. =# model = SRRegressor(; binary_operators=(+, -, *, /), diff --git a/ext/SymbolicRegressionEnzymeExt.jl b/ext/SymbolicRegressionEnzymeExt.jl index b8b8be60b..f8c9b0a25 100644 --- a/ext/SymbolicRegressionEnzymeExt.jl +++ b/ext/SymbolicRegressionEnzymeExt.jl @@ -10,30 +10,37 @@ using DynamicExpressions: with_contents, get_contents using ADTypes: AutoEnzyme -using Enzyme: autodiff, Reverse, Active, Const, Duplicated +using Enzyme: autodiff, Reverse, Active, Const, Duplicated, make_zero, remake_zero! -import SymbolicRegression.ConstantOptimizationModule: GradEvaluator +import SymbolicRegression.ConstantOptimizationModule: Evaluator, GradEvaluator # We prepare a copy of the tree and all arrays -function GradEvaluator(f::F, backend::AE) where {F,AE<:AutoEnzyme} - storage_tree = copy(f.tree) +function GradEvaluator(f::F, backend::AE) where {F<:Evaluator,AE<:AutoEnzyme} + storage_tree = make_zero(f.tree) _, storage_refs = get_scalar_constants(storage_tree) - storage_dataset = deepcopy(f.dataset) + storage_dataset = make_zero(f.ctx.dataset) + storage_options = make_zero(f.ctx.options) # TODO: It is super inefficient to deepcopy; how can we skip this - return GradEvaluator(f, backend, (; storage_tree, storage_refs, storage_dataset)) + return GradEvaluator( + f, + nothing, + backend, + (; storage_tree, storage_refs, storage_dataset, storage_options), + ) end -function evaluator(tree, dataset, options, idx, output) - output[] = eval_loss(tree, dataset, options; regularization=false, idx=idx) +function evaluator(tree, dataset, options, output) + output[] = eval_loss(tree, dataset, options; regularization=false) return nothing end with_stacksize(f::F, n) where {F} = fetch(schedule(Task(f, n))) function (g::GradEvaluator{<:Any,<:AutoEnzyme})(_, G, x::AbstractVector{T}) where {T} - 
set_scalar_constants!(g.f.tree, x, g.f.refs) - set_scalar_constants!(g.extra.storage_tree, zero(x), g.extra.storage_refs) - fill!(g.extra.storage_dataset, 0) + set_scalar_constants!(g.e.tree, x, g.e.refs) + remake_zero!(g.extra.storage_tree) + remake_zero!(g.extra.storage_dataset) + remake_zero!(g.extra.storage_options) output = [zero(T)] doutput = [one(T)] @@ -42,10 +49,9 @@ function (g::GradEvaluator{<:Any,<:AutoEnzyme})(_, G, x::AbstractVector{T}) wher autodiff( Reverse, evaluator, - Duplicated(g.f.tree, g.extra.storage_tree), - Duplicated(g.f.dataset, g.extra.storage_dataset), - Const(g.f.options), - Const(g.f.idx), + Duplicated(g.e.tree, g.extra.storage_tree), + Duplicated(g.e.ctx.dataset, g.extra.storage_dataset), + Duplicated(g.e.ctx.options, g.extra.storage_options), Duplicated(output, doutput), ) end diff --git a/ext/SymbolicRegressionMooncakeExt.jl b/ext/SymbolicRegressionMooncakeExt.jl new file mode 100644 index 000000000..f51e07ed6 --- /dev/null +++ b/ext/SymbolicRegressionMooncakeExt.jl @@ -0,0 +1,44 @@ +module SymbolicRegressionMooncakeExt + +using DynamicExpressions: DynamicExpressions as DE +using SymbolicRegression: SymbolicRegression as SR +using SymbolicRegression.ConstantOptimizationModule: count_constants_for_optimization +using Mooncake: Mooncake + +function DE.extract_gradient( + gradient::Mooncake.Tangent, ex::SR.TemplateExpression{T} +) where {T} + n_const = count_constants_for_optimization(ex) + out = Array{T}(undef, n_const) + i = firstindex(out) + for (tree_gradient, tree) in zip(values(gradient.fields.trees), values(ex.trees)) + @assert( + !(tree_gradient isa Mooncake.NoTangent), + "Unexpected input type: $(tree_gradient)::$(typeof(tree_gradient))" + ) + grad_array = DE.extract_gradient(tree_gradient, tree) + @inbounds for g in grad_array + i = DE.pack_scalar_constants!(out, i, g) + end + end + if SR.has_params(ex) + for (param_gradient, param) in zip( + values(gradient.fields.metadata.fields._data.parameters), + 
values(ex.metadata.parameters), + ) + @assert( + !(param_gradient isa Mooncake.NoTangent), + "Unexpected input type: $(param_gradient)::$(typeof(param_gradient))" + ) + @inbounds for g in param_gradient.fields._data + i = DE.pack_scalar_constants!(out, i, g) + end + end + end + return out +end +function DE.extract_gradient(gradient::Mooncake.Tangent, ex::SR.ComposableExpression) + return DE.extract_gradient(gradient.fields.tree, DE.get_tree(ex)) +end + +end diff --git a/src/CheckConstraints.jl b/src/CheckConstraints.jl index fb0bbb712..b612e6766 100644 --- a/src/CheckConstraints.jl +++ b/src/CheckConstraints.jl @@ -1,39 +1,33 @@ module CheckConstraintsModule using DynamicExpressions: - AbstractExpressionNode, AbstractExpression, get_tree, count_depth, tree_mapreduce + AbstractExpressionNode, + AbstractExpression, + get_tree, + count_depth, + tree_mapreduce, + get_child using ..CoreModule: AbstractOptions using ..ComplexityModule: compute_complexity, past_complexity_limit -# Check if any binary operator are overly complex -function flag_bin_operator_complexity( - tree::AbstractExpressionNode, op, cons, options::AbstractOptions +# Generic operator complexity checking for any degree +function flag_operator_complexity( + tree::AbstractExpressionNode, degree::Int, op::Int, cons, options::AbstractOptions )::Bool - any(tree) do subtree - if subtree.degree == 2 && subtree.op == op - cons[1] > -1 && - past_complexity_limit(subtree.l, options, cons[1]) && - return true - cons[2] > -1 && - past_complexity_limit(subtree.r, options, cons[2]) && - return true - end - return false + return any(tree) do subtree + subtree.degree == degree && + subtree.op == op && + _check_operator_constraints(subtree, degree, cons, options) end end -""" -Check if any unary operators are overly complex. -This assumes you have already checked whether the constraint is > -1. 
-""" -function flag_una_operator_complexity( - tree::AbstractExpressionNode, op, cons, options::AbstractOptions +function _check_operator_constraints( + node::AbstractExpressionNode, degree::Int, cons, options::AbstractOptions )::Bool - any(tree) do subtree - if subtree.degree == 1 && tree.op == op - past_complexity_limit(subtree.l, options, cons) && return true - end - return false + @assert degree != 0 + + return any(1:degree) do i + cons[i] != -1 && past_complexity_limit(get_child(node, i), options, cons[i]) end end @@ -54,19 +48,18 @@ end """Check if there are any illegal combinations of operators""" function flag_illegal_nests(tree::AbstractExpressionNode, options::AbstractOptions)::Bool # We search from the top first, then from child nodes at end. - (nested_constraints = options.nested_constraints) === nothing && return false - for (degree, op_idx, op_constraint) in nested_constraints - for (nested_degree, nested_op_idx, max_nestedness) in op_constraint - any(tree) do subtree - if subtree.degree == degree && subtree.op == op_idx - nestedness = count_max_nestedness(subtree, nested_degree, nested_op_idx) - return nestedness > max_nestedness + nested_constraints = options.nested_constraints + isnothing(nested_constraints) && return false + any(tree) do subtree + any(nested_constraints) do (degree, op_idx, op_constraints) + subtree.degree == degree && + subtree.op == op_idx && + any(op_constraints) do (nested_degree, nested_op_idx, max_nestedness) + count_max_nestedness(subtree, nested_degree, nested_op_idx) > + max_nestedness end - return false - end && return true end end - return false end """Check if user-passed constraints are satisfied. 
Returns false otherwise.""" @@ -74,30 +67,26 @@ function check_constraints( ex::AbstractExpression, options::AbstractOptions, maxsize::Int, - cursize::Union{Int,Nothing}=nothing, + cached_size::Union{Int,Nothing}=nothing, )::Bool tree = get_tree(ex) - return check_constraints(tree, options, maxsize, cursize) + return check_constraints(tree, options, maxsize, cached_size) end function check_constraints( tree::AbstractExpressionNode, options::AbstractOptions, maxsize::Int, - cursize::Union{Int,Nothing}=nothing, + cached_size::Union{Int,Nothing}=nothing, )::Bool - ((cursize === nothing) ? compute_complexity(tree, options) : cursize) > maxsize && - return false + @something(cached_size, compute_complexity(tree, options)) > maxsize && return false count_depth(tree) > options.maxdepth && return false - for i in 1:(options.nbin) - cons = options.bin_constraints[i] - cons == (-1, -1) && continue - flag_bin_operator_complexity(tree, i, cons, options) && return false - end - for i in 1:(options.nuna) - cons = options.una_constraints[i] - cons == -1 && continue - flag_una_operator_complexity(tree, i, cons, options) && return false + any_invalid = any(enumerate(options.op_constraints)) do (degree, degree_constraints) + any(enumerate(degree_constraints)) do (op_idx, cons) + any(!=(-1), cons) && + flag_operator_complexity(tree, degree, op_idx, cons, options) + end end + any_invalid && return false flag_illegal_nests(tree, options) && return false return true end diff --git a/src/Complexity.jl b/src/Complexity.jl index 54b101898..a60477342 100644 --- a/src/Complexity.jl +++ b/src/Complexity.jl @@ -51,8 +51,8 @@ function _compute_complexity( t -> t.constant ? cc : vc end end, - let uc = cmap.unaop_complexities, bc = cmap.binop_complexities - t -> t.degree == 1 ? 
@inbounds(uc[t.op]) : @inbounds(bc[t.op]) + let op_complexities = cmap.op_complexities + t -> @inbounds(op_complexities[t.degree][t.op]) end, +, tree, diff --git a/src/ComposableExpression.jl b/src/ComposableExpression.jl index c23793477..7e3ee3dd0 100644 --- a/src/ComposableExpression.jl +++ b/src/ComposableExpression.jl @@ -187,17 +187,42 @@ function (ex::AbstractComposableExpression)( return x .* nan end end +# Method for all-Number arguments (scalars) +function (ex::AbstractComposableExpression)(x::Number, _xs::Vararg{Number,N}) where {N} + xs = (x, _xs...) + + vectors = ntuple(i -> ValidVector([float(xs[i])], true), length(xs)) + return only(_get_value(ex(vectors...))) +end + function (ex::AbstractComposableExpression)( - x::ValidVector, _xs::Vararg{ValidVector,N} + x::Union{ValidVector,Number}, _xs::Vararg{Union{ValidVector,Number},N} ) where {N} xs = (x, _xs...) - valid = all(_is_valid, xs) - if !valid - return ValidVector(_get_value(first(xs)), false) - else - X = Matrix(stack(map(_get_value, xs))') + sample_vector = + let first_valid_vector_idx = findfirst(arg -> arg isa ValidVector, xs)::Int + xs[first_valid_vector_idx]::ValidVector + end + + # Convert Numbers to ValidVectors based on first ValidVector's size + valid_args = ntuple(length(xs)) do i + arg = xs[i] + if arg isa ValidVector + arg + else + # Convert Number to ValidVector with repeated values + filled_array = similar(sample_vector.x) + fill!(filled_array, arg) + ValidVector(filled_array, true) + end + end + + if all(_is_valid, valid_args) + X = stack(map(_get_value, valid_args); dims=1) eval_options = get_eval_options(ex) return ValidVector(eval_tree_array(ex, X; eval_options)) + else + return ValidVector(_get_value(first(valid_args)), false) end end function (ex::AbstractComposableExpression{T})() where {T} @@ -235,22 +260,94 @@ end # Basically we want to vectorize every single operation on ValidVector, # so that the user can use it easily. 
+function _apply_operator(op::F, x::Vararg{Any,N}) where {F<:Function,N} + vx = map(_get_value, x) + safe_op = get_safe_op(op) + result = safe_op.(vx...) + return ValidVector(result, is_valid_array(result)) +end + function apply_operator(op::F, x::Vararg{Any,N}) where {F<:Function,N} if all(_is_valid, x) - vx = map(_get_value, x) - safe_op = get_safe_op(op) - result = safe_op.(vx...) - return ValidVector(result, is_valid_array(result)) + return _apply_operator(op, x...) else example_vector = something(map(xi -> xi isa ValidVector ? xi : nothing, x)...)::ValidVector - return ValidVector(_get_value(example_vector), false) + expected_return_type = Base.promote_op( + _apply_operator, typeof(op), map(typeof, x)... + ) + if expected_return_type !== Union{} && + expected_return_type <: ValidVector{<:AbstractArray} + return ValidVector( + _match_eltype(expected_return_type, example_vector.x), false + )::expected_return_type + else + return ValidVector(example_vector.x, false) + end end end _is_valid(x::ValidVector) = x.valid _is_valid(x) = true _get_value(x::ValidVector) = x.x _get_value(x) = x +function _match_eltype( + ::Type{<:ValidVector{<:AbstractArray{T1}}}, x::AbstractArray{T2} +) where {T1,T2} + if T1 == T2 + return x + else + return Base.Fix1(convert, T1).(x) + end +end + +struct ValidVectorMixError <: Exception end +struct ValidVectorAccessError <: Exception end + +function Base.showerror(io::IO, ::ValidVectorMixError) + return print( + io, + """ +ValidVectorMixError: Cannot mix ValidVector with regular Vector. + +ValidVector handles validity checks, auto-vectorization, and batching in template expressions. +The .valid field tracks whether any upstream computation failed (false = failed, true = valid). 
+ +Wrap your vectors in ValidVector: + + ```julia + valid_ar1 = ValidVector(ar1, all(isfinite, ar1)) + valid_ar1 + valid_ar2 + ``` + +Alternatively, you can access the vector from a ValidVector with `my_validvector.x`, +but you must be sure to propagate the `.valid` field. For example: + + ```julia + out = ar1 .+ valid_ar2.x + ValidVector(out, all(isfinite, out) && valid_ar2.valid) + ``` + +""", + ) +end + +function Base.showerror(io::IO, ::ValidVectorAccessError) + return print( + io, + """ +ValidVectorAccessError: ValidVector doesn't support direct array operations. + +Use .x for data and .valid for validity: + + ```julia + valid_ar.x[1] # indexing + length(valid_ar.x) # length + valid_ar.valid # check validity (false = any upstream computation failed) + ``` + +ValidVector handles validity/batching automatically in template expressions.""", + ) +end #! format: off # First, binary operators: @@ -264,6 +361,9 @@ for op in ( Base.$(op)(x::ValidVector, y::ValidVector) = apply_operator(Base.$(op), x, y) Base.$(op)(x::ValidVector, y::Number) = apply_operator(Base.$(op), x, y) Base.$(op)(x::Number, y::ValidVector) = apply_operator(Base.$(op), x, y) + + Base.$(op)(::ValidVector, ::AbstractVector) = throw(ValidVectorMixError()) + Base.$(op)(::AbstractVector, ::ValidVector) = throw(ValidVectorMixError()) end end function Base.literal_pow(::typeof(^), x::ValidVector, ::Val{p}) where {p} @@ -286,4 +386,12 @@ for op in ( end #! format: on +Base.length(::ValidVector) = throw(ValidVectorAccessError()) +Base.push!(::ValidVector, ::Any) = throw(ValidVectorAccessError()) +for op in (:getindex, :size, :append!, :setindex!) + @eval Base.$(op)(::ValidVector, ::Any...) 
= throw(ValidVectorAccessError()) +end + +# TODO: Support for 3-ary operators + end diff --git a/src/Configure.jl b/src/Configure.jl index d0a31138c..e1c04c7e5 100644 --- a/src/Configure.jl +++ b/src/Configure.jl @@ -1,3 +1,5 @@ +using Random: MersenneTwister + const TEST_TYPE = Float32 function test_operator(@nospecialize(op::Function), x::T, y=nothing) where {T} @@ -33,18 +35,26 @@ precompile(Tuple{typeof(test_operator),Function,Float32}) const TEST_INPUTS = collect(range(-100, 100; length=99)) +function get_test_inputs(::Type{T}, ::AbstractOptions) where {T<:Number} + return Base.Fix1(convert, T).(TEST_INPUTS) +end +function get_test_inputs(::Type{T}, ::AbstractOptions) where {T<:Complex} + return Base.Fix1(convert, T).(TEST_INPUTS .+ TEST_INPUTS .* im) +end +function get_test_inputs(::Type{T}, options::AbstractOptions) where {T} + rng = MersenneTwister(0) + return [sample_value(rng, T, options) for _ in 1:100] +end + function assert_operators_well_defined(T, options::AbstractOptions) - test_input = if T <: Complex - Base.Fix1(convert, T).(TEST_INPUTS .+ TEST_INPUTS .* im) - else - Base.Fix1(convert, T).(TEST_INPUTS) - end + test_input = get_test_inputs(T, options) for x in test_input, y in test_input, op in options.operators.binops test_operator(op, x, y) end for x in test_input, op in options.operators.unaops test_operator(op, x) end + return nothing end # Check for errors before they happen @@ -124,6 +134,7 @@ function move_functions_to_workers( :binops, :elementwise_loss, :early_stop_condition, + :expression_type, :loss_function, :loss_function_expression, :complexity_mapping, @@ -132,19 +143,19 @@ function move_functions_to_workers( for function_set in function_sets if function_set == :unaops ops = options.operators.unaops - example_inputs = (zero(T),) + example_inputs = (init_value(T),) elseif function_set == :binops ops = options.operators.binops - example_inputs = (zero(T), zero(T)) + example_inputs = (init_value(T), init_value(T)) elseif function_set == 
:elementwise_loss if typeof(options.elementwise_loss) <: SupervisedLoss continue end ops = (options.elementwise_loss,) example_inputs = if is_weighted(dataset) - (zero(T), zero(T), zero(T)) + (init_value(T), init_value(T), init_value(T)) else - (zero(T), zero(T)) + (init_value(T), init_value(T)) end elseif function_set == :early_stop_condition if !(typeof(options.early_stop_condition) <: Function) @@ -152,25 +163,37 @@ function move_functions_to_workers( end ops = (options.early_stop_condition,) example_inputs = (zero(T), 0) + elseif function_set == :expression_type + # Needs to run _before_ using TemplateExpression anywhere, such + # as in `loss_function_expression`! + if isnothing(options.expression_type) + continue + end + if !require_copy_to_workers(options.expression_type) + continue + end + (; ops, example_inputs) = make_example_inputs( + options.expression_type, T, options, dataset + ) elseif function_set == :loss_function - if options.loss_function === nothing + if isnothing(options.loss_function) continue end ops = (options.loss_function,) - example_inputs = (Node(T; val=zero(T)), dataset, options) + example_inputs = ((options.node_type)(T; val=init_value(T)), dataset, options) elseif function_set == :loss_function_expression - if options.loss_function_expression === nothing + if isnothing(options.loss_function_expression) continue end ops = (options.loss_function_expression,) - ex = create_expression(zero(T), options, dataset) + ex = create_expression(init_value(T), options, dataset) example_inputs = (ex, dataset, options) elseif function_set == :complexity_mapping if !(options.complexity_mapping isa Function) continue end ops = (options.complexity_mapping,) - example_inputs = (create_expression(zero(T), options, dataset),) + example_inputs = (create_expression(init_value(T), options, dataset),) else error("Invalid function set: $function_set") end @@ -256,6 +279,7 @@ function import_module_on_workers( :ClusterManagers, :Enzyme, :LoopVectorization, + 
:Mooncake, :SymbolicUtils, :TensorBoardLogger, :Zygote, @@ -344,6 +368,7 @@ function configure_workers(; procs::Union{Vector{Int},Nothing}, numprocs::Int, addprocs_function::Function, + worker_timeout::Float64, options::AbstractOptions, @nospecialize(worker_imports::Union{Vector{Symbol},Nothing}), project_path, @@ -354,7 +379,9 @@ function configure_workers(; runtests::Bool, ) (procs, we_created_procs) = if procs === nothing - (addprocs_function(numprocs; lazy=false, exeflags), true) + withenv("JULIA_WORKER_TIMEOUT" => string(worker_timeout)) do + (addprocs_function(numprocs; lazy=false, exeflags), true) + end else (procs, false) end diff --git a/src/ConstantOptimization.jl b/src/ConstantOptimization.jl index c4f4365f8..1c6bb01f1 100644 --- a/src/ConstantOptimization.jl +++ b/src/ConstantOptimization.jl @@ -1,9 +1,10 @@ module ConstantOptimizationModule +using Random: AbstractRNG, default_rng using LineSearches: LineSearches using Optim: Optim using ADTypes: AbstractADType, AutoEnzyme -using DifferentiationInterface: value_and_gradient +using DifferentiationInterface: value_and_gradient, prepare_gradient using DynamicExpressions: AbstractExpression, Expression, @@ -11,15 +12,27 @@ using DynamicExpressions: get_scalar_constants, set_scalar_constants!, extract_gradient +using DispatchDoctor: @unstable using ..CoreModule: AbstractOptions, Dataset, DATA_TYPE, LOSS_TYPE, specialized_options, dataset_fraction -using ..UtilsModule: get_birth_order +using ..UtilsModule: get_birth_order, PerTaskCache, stable_get! 
using ..LossFunctionsModule: eval_loss, loss_to_cost using ..PopMemberModule: AbstractPopMember -function optimize_constants( - dataset::Dataset{T,L}, member::P, options::AbstractOptions +function can_optimize(::AbstractExpression{T}, options) where {T} + return can_optimize(T, options) +end +function can_optimize(::Type{T}, _) where {T<:Number} + return true +end + +@unstable function optimize_constants( + dataset::Dataset{T,L}, + member::P, + options::AbstractOptions; + rng::AbstractRNG=default_rng(), )::Tuple{P,Float64} where {T<:DATA_TYPE,L<:LOSS_TYPE,P<:AbstractPopMember{T,L}} + can_optimize(member.tree, options) || return (member, 0.0) nconst = count_constants_for_optimization(member.tree) nconst == 0 && return (member, 0.0) if nconst == 1 && !(T <: Complex) @@ -30,6 +43,7 @@ function optimize_constants( specialized_options(options), algorithm, options.optimizer_options, + rng, ) end return _optimize_constants( @@ -40,6 +54,7 @@ function optimize_constants( # more particular about dynamic dispatch options.optimizer_algorithm, options.optimizer_options, + rng, ) end @@ -47,14 +62,21 @@ end count_constants_for_optimization(ex::Expression) = count_scalar_constants(ex) function _optimize_constants( - dataset, member::P, options, algorithm, optimizer_options + dataset, member::P, options, algorithm, optimizer_options, rng )::Tuple{P,Float64} where {T,L,P<:AbstractPopMember{T,L}} tree = member.tree - eval_fraction = dataset_fraction(dataset) x0, refs = get_scalar_constants(tree) @assert count_constants_for_optimization(tree) == length(x0) - f = Evaluator(tree, refs, dataset, options) + ctx = EvaluatorContext(dataset, options) + f = Evaluator(tree, refs, ctx) fg! 
= GradEvaluator(f, options.autodiff_backend) + return _optimize_constants_inner( + f, fg!, x0, refs, dataset, member, options, algorithm, optimizer_options, rng + ) +end +function _optimize_constants_inner( + f::F, fg!::G, x0, refs, dataset, member::P, options, algorithm, optimizer_options, rng +)::Tuple{P,Float64} where {F,G,T,L,P<:AbstractPopMember{T,L}} obj = if algorithm isa Optim.Newton || options.autodiff_backend === nothing f else @@ -62,10 +84,11 @@ function _optimize_constants( end baseline = f(x0) result = Optim.optimize(obj, x0, algorithm, optimizer_options) + eval_fraction = dataset_fraction(dataset) num_evals = result.f_calls * eval_fraction # Try other initial conditions: for _ in 1:(options.optimizer_nrestarts) - eps = randn(T, size(x0)...) + eps = randn(rng, T, size(x0)...) xt = @. x0 * (T(1) + T(1//2) * eps) tmpresult = Optim.optimize(obj, xt, algorithm, optimizer_options) num_evals += tmpresult.f_calls * eval_fraction @@ -77,7 +100,7 @@ function _optimize_constants( end if result.minimum < baseline - member.tree = tree + set_scalar_constants!(member.tree, result.minimizer, refs) member.loss = f(result.minimizer; regularization=true) member.cost = loss_to_cost( member.loss, dataset.use_baseline, dataset.baseline_loss, member, options @@ -85,38 +108,60 @@ function _optimize_constants( member.birth = get_birth_order(; deterministic=options.deterministic) num_evals += eval_fraction else + # Reset to original state set_scalar_constants!(member.tree, x0, refs) end return member, num_evals end -struct Evaluator{N<:AbstractExpression,R,D<:Dataset,O<:AbstractOptions} <: Function - tree::N - refs::R +struct EvaluatorContext{D<:Dataset,O<:AbstractOptions} <: Function dataset::D options::O end +function (c::EvaluatorContext)(tree; regularization=false) + return eval_loss(tree, c.dataset, c.options; regularization) +end + +struct Evaluator{N<:AbstractExpression,R,C<:EvaluatorContext} <: Function + tree::N + refs::R + ctx::C +end function 
(e::Evaluator)(x::AbstractVector; regularization=false) set_scalar_constants!(e.tree, x, e.refs) - return eval_loss(e.tree, e.dataset, e.options; regularization) + return e.ctx(e.tree; regularization) end -struct GradEvaluator{F<:Evaluator,AD<:Union{Nothing,AbstractADType},EX} <: Function - f::F +struct GradEvaluator{E<:Evaluator,AD<:Union{Nothing,AbstractADType},PR,EX} <: Function + e::E + prep::PR backend::AD extra::EX end -GradEvaluator(f::F, backend::AD) where {F,AD} = GradEvaluator(f, backend, nothing) +@unstable function GradEvaluator(e::Evaluator, backend) + prep = isnothing(backend) ? nothing : _cached_prep(e.ctx, backend, e.tree) + return GradEvaluator(e, prep, backend, nothing) +end + +const CachedPrep = PerTaskCache{Dict{UInt,Any}}() + +@unstable function _cached_prep(ctx, backend, example_tree) + # We avoid hashing on the tree _value_ because it should not + # affect the prep. We want to cache as much as possible! + key = hash((ctx, backend, typeof(example_tree))) + stable_get!(CachedPrep[], key) do + prepare_gradient(ctx, backend, example_tree) + end +end function (g::GradEvaluator{<:Any,AD})(_, G, x::AbstractVector) where {AD} AD isa AutoEnzyme && error("Please load the `Enzyme.jl` package.") - set_scalar_constants!(g.f.tree, x, g.f.refs) - (val, grad) = value_and_gradient(g.backend, g.f.tree) do tree - eval_loss(tree, g.f.dataset, g.f.options; regularization=false) - end + set_scalar_constants!(g.e.tree, x, g.e.refs) + maybe_prep = isnothing(g.prep) ? 
() : (g.prep,) + (val, grad) = value_and_gradient(g.e.ctx, maybe_prep..., g.backend, g.e.tree) if G !== nothing && grad !== nothing - G .= extract_gradient(grad, g.f.tree) + G .= extract_gradient(grad, g.e.tree) end return val end diff --git a/src/Core.jl b/src/Core.jl index 7a4970239..76f7e0fe4 100644 --- a/src/Core.jl +++ b/src/Core.jl @@ -10,6 +10,7 @@ include("OptionsStruct.jl") include("Operators.jl") include("ExpressionSpec.jl") include("Options.jl") +include("InterfaceDataTypes.jl") using .ProgramConstantsModule: RecordType, DATA_TYPE, LOSS_TYPE using .DatasetModule: @@ -29,7 +30,9 @@ using .OptionsStructModule: Options, ComplexityMapping, specialized_options, - operator_specialization + operator_specialization, + WarmStartIncompatibleError, + check_warm_start_compatibility using .OperatorsModule: get_safe_op, plus, @@ -67,5 +70,6 @@ using .ExpressionSpecModule: get_expression_type, get_expression_options, get_node_type +using .InterfaceDataTypesModule: init_value, sample_value, mutate_value end diff --git a/src/DimensionalAnalysis.jl b/src/DimensionalAnalysis.jl index d469e6848..5690e4a06 100644 --- a/src/DimensionalAnalysis.jl +++ b/src/DimensionalAnalysis.jl @@ -1,6 +1,7 @@ module DimensionalAnalysisModule -using DynamicExpressions: AbstractExpression, AbstractExpressionNode, get_tree +using DynamicExpressions: + AbstractExpression, AbstractExpressionNode, get_tree, get_child, tree_mapreduce using DynamicQuantities: Quantity, DimensionError, AbstractQuantity, constructorof using ..CoreModule: AbstractOptions, Dataset @@ -129,54 +130,89 @@ end ) end end -@inline function deg1_eval( - op::F, l::W -) where {F,T,Q<:AbstractQuantity{T},W<:WildcardQuantity{Q}} - l.violates && return l - !isfinite(l) && return W(one(Q), false, true) +@generated function degn_eval( + op::F, _arg::W, _args::Vararg{W,Nm1} +) where {F,Nm1,T,Q<:AbstractQuantity{T},W<:WildcardQuantity{Q}} + N = Nm1 + 1 + quote + args = (_arg, _args...) 
+ Base.Cartesian.@nextract($N, arg, args) + Base.Cartesian.@nexprs($N, i -> arg_i.violates && return arg_i) + # ^For N = 2: + # ``` + # arg_1.violates && return arg_1 + # arg_2.violates && return arg_2 + # ``` + Base.Cartesian.@nany($N, i -> !isfinite(arg_i)) && return W(one(Q), false, true) + # ^For N = 2: + # ``` + # !isfinite(arg_1) || !isfinite(arg_2) && return W(one(Q), false, true) + # ``` + # COV_EXCL_START + Base.Cartesian.@nexprs( + $(2^N), + i -> begin + # Get indices of N-d matrix of types: + Base.Cartesian.@nexprs( + $N, j -> lattice_j = compute_lattice(Val($N), Val(i), Val(j)) + ) - hasmethod(op, Tuple{W}) && @maybe_return_call(W, op, (l,)) - l.wildcard && return W(Quantity(op(ustrip(l))::T), false, false) - return W(one(Q), false, true) + # (e.g., for N = 3, this would be (0, 0, 0), (0, 0, 1), ..., (1, 1, 1)) + #! format: off + if hasmethod(op, Tuple{Base.Cartesian.@ntuple($N, j -> lattice_j == 0 ? W : T)...}) && + Base.Cartesian.@nall($N, j -> lattice_j == 0 ? true : arg_j.wildcard) + + # if on last one, we always evaluate (assuming wildcards are on): + if i == $(2^N) + return W( + op(Base.Cartesian.@ntuple($N, j -> ustrip(arg_j))...)::T, + false, + false, + ) + else + @maybe_return_call( + W, + op, + Base.Cartesian.@ntuple( + $N, j -> lattice_j == 0 ? arg_j : ustrip(arg_j) + ) + ) + end + end + #! 
format: on + end + ) + # COV_EXCL_STOP + # ^For N = 2: + # ``` + # hasmethod(op, Tuple{W,W}) && @maybe_return_call(W, op, (arg_1, arg_2)) + # hasmethod(op, Tuple{W,T}) && arg_2.wildcard && @maybe_return_call(W, op, (arg_1, ustrip(arg_2))) + # hasmethod(op, Tuple{T,W}) && arg_1.wildcard && @maybe_return_call(W, op, (ustrip(arg_1), arg_2)) + # hasmethod(op, Tuple{T,T}) && arg_1.wildcard && arg_2.wildcard && W(op(ustrip(arg_1), ustrip(arg_2))::T, false, false) + # ``` + return W(one(Q), false, true) + end end -@inline function deg2_eval( - op::F, l::W, r::W -) where {F,T,Q<:AbstractQuantity{T},W<:WildcardQuantity{Q}} - l.violates && return l - r.violates && return r - (!isfinite(l) || !isfinite(r)) && return W(one(Q), false, true) - hasmethod(op, Tuple{W,W}) && @maybe_return_call(W, op, (l, r)) - hasmethod(op, Tuple{T,W}) && l.wildcard && @maybe_return_call(W, op, (ustrip(l), r)) - hasmethod(op, Tuple{W,T}) && r.wildcard && @maybe_return_call(W, op, (l, ustrip(r))) - l.wildcard && - r.wildcard && - return W(Quantity(op(ustrip(l), ustrip(r))::T), false, false) - return W(one(Q), false, true) +@generated function compute_lattice(::Val{N}, ::Val{i}, ::Val{j}) where {N,i,j} + return div(i - 1, (2^(N - j))) % 2 end function violates_dimensional_constraints_dispatch( - tree::AbstractExpressionNode{T}, + tree::AbstractExpressionNode{T,D}, x_units::Vector{Q}, x::AbstractVector{T}, operators, allow_wildcards, -) where {T,Q<:AbstractQuantity{T}} - if tree.degree == 0 - return deg0_eval(x, x_units, tree, allow_wildcards)::WildcardQuantity{Q} - elseif tree.degree == 1 - l = violates_dimensional_constraints_dispatch( - tree.l, x_units, x, operators, allow_wildcards - ) - return deg1_eval((@inbounds operators.unaops[tree.op]), l)::WildcardQuantity{Q} - else - l = violates_dimensional_constraints_dispatch( - tree.l, x_units, x, operators, allow_wildcards - ) - r = violates_dimensional_constraints_dispatch( - tree.r, x_units, x, operators, allow_wildcards - ) - return 
deg2_eval((@inbounds operators.binops[tree.op]), l, r)::WildcardQuantity{Q} - end +) where {T,Q<:AbstractQuantity{T},D} + #! format: off + return tree_mapreduce( + leaf -> deg0_eval(x, x_units, leaf, allow_wildcards)::WildcardQuantity{Q}, + branch -> branch, + (branch, children...) -> degn_eval((@inbounds operators.ops[branch.degree][branch.op]), children...)::WildcardQuantity{Q}, + tree; + break_sharing=Val(true), + ) + #! format: on end """ diff --git a/src/ExpressionBuilder.jl b/src/ExpressionBuilder.jl index d4f926733..61caf3d0e 100644 --- a/src/ExpressionBuilder.jl +++ b/src/ExpressionBuilder.jl @@ -17,7 +17,7 @@ import DynamicExpressions: get_operators import ..CoreModule: create_expression @unstable function create_expression( - t::T, options::AbstractOptions, dataset::Dataset{T,L}, ::Val{embed}=Val(false) + t::T, options::AbstractOptions, dataset::Dataset{T,L}, (::Val{embed})=Val(false) ) where {T,L,embed} return create_expression( t, options, dataset, options.node_type, options.expression_type, Val(embed) @@ -27,7 +27,7 @@ end t::AbstractExpressionNode{T}, options::AbstractOptions, dataset::Dataset{T,L}, - ::Val{embed}=Val(false), + (::Val{embed})=Val(false), ) where {T,L,embed} return create_expression( t, options, dataset, options.node_type, options.expression_type, Val(embed) @@ -37,7 +37,7 @@ function create_expression( ex::AbstractExpression{T}, options::AbstractOptions, ::Dataset{T,L}, - ::Val{embed}=Val(false), + (::Val{embed})=Val(false), ) where {T,L,embed} return ex::options.expression_type end @@ -47,7 +47,7 @@ end dataset::Dataset{T,L}, ::Type{N}, ::Type{E}, - ::Val{embed}=Val(false), + (::Val{embed})=Val(false), ) where {T,L,embed,N<:AbstractExpressionNode,E<:AbstractExpression} return create_expression(constructorof(N)(; val=t), options, dataset, N, E, Val(embed)) end @@ -57,7 +57,7 @@ end dataset::Dataset{T,L}, ::Type{<:AbstractExpressionNode}, ::Type{E}, - ::Val{embed}=Val(false), + (::Val{embed})=Val(false), ) where 
{T,L,embed,E<:AbstractExpression} return constructorof(E)(t; init_params(options, dataset, nothing, Val(embed))...) end diff --git a/src/HallOfFame.jl b/src/HallOfFame.jl index 26b78b2f1..474eadc40 100644 --- a/src/HallOfFame.jl +++ b/src/HallOfFame.jl @@ -1,17 +1,17 @@ module HallOfFameModule -using DispatchDoctor: @unstable -using StyledStrings: styled +using StyledStrings: @styled_str using DynamicExpressions: AbstractExpression, string_tree using ..UtilsModule: split_string, AnnotatedIOBuffer, dump_buffer -using ..CoreModule: AbstractOptions, Dataset, DATA_TYPE, LOSS_TYPE, relu, create_expression +using ..CoreModule: + AbstractOptions, Dataset, DATA_TYPE, LOSS_TYPE, relu, create_expression, init_value using ..ComplexityModule: compute_complexity using ..PopMemberModule: AbstractPopMember, PopMember using ..InterfaceDynamicExpressionsModule: format_dimensions, WILDCARD_UNIT_STRING using Printf: @sprintf """ - HallOfFame{T<:DATA_TYPE,L<:LOSS_TYPE} + HallOfFame{T<:DATA_TYPE,L<:LOSS_TYPE,N<:AbstractExpression{T}} List of the best members seen all time in `.members`, with `.members[c]` being the best member seen at complexity c. Including only the members which actually @@ -19,7 +19,7 @@ have been set, you can run `.members[exists]`. # Fields -- `members::Array{PopMember{T,L},1}`: List of the best members seen all time. +- `members::Array{PopMember{T,L,N},1}`: List of the best members seen all time. These are ordered by complexity, with `.members[1]` the member with complexity 1. - `exists::Array{Bool,1}`: Whether the member at the given complexity has been set. 
""" @@ -49,6 +49,9 @@ function Base.show(io::IO, mime::MIME"text/plain", hof::HallOfFame{T,L,N}) where end return nothing end +function Base.eltype(::Union{HOF,Type{HOF}}) where {T,L,N,HOF<:HallOfFame{T,L,N}} + return PopMember{T,L,N} +end """ HallOfFame(options::AbstractOptions, dataset::Dataset{T,L}) where {T<:DATA_TYPE,L<:LOSS_TYPE} @@ -66,7 +69,7 @@ Arguments: function HallOfFame( options::AbstractOptions, dataset::Dataset{T,L} ) where {T<:DATA_TYPE,L<:LOSS_TYPE} - base_tree = create_expression(zero(T), options, dataset) + base_tree = create_expression(init_value(T), options, dataset) return HallOfFame{T,L,typeof(base_tree),PopMember{T,L,typeof(base_tree)}}( [ @@ -92,7 +95,7 @@ end """ calculate_pareto_frontier(hallOfFame::HallOfFame{T,L,P}) where {T<:DATA_TYPE,L<:LOSS_TYPE} """ -@unstable function calculate_pareto_frontier(hallOfFame::HallOfFame{T,L,N}) where {T,L,N} +function calculate_pareto_frontier(hallOfFame::HallOfFame{T,L,N}) where {T,L,N} # TODO - remove dataset from args. # Dominating pareto curve - must be better than all simpler equations dominating = similar(hallOfFame.members, 0) @@ -121,57 +124,54 @@ end return dominating end -# const HEADER = let -# join( -# ( -# rpad(styled"{bold:{underline:Complexity}}", 10), -# rpad(styled"{bold:{underline:Loss}}", 9), -# rpad(styled"{bold:{underline:Score}}", 9), -# styled"{bold:{underline:Equation}}", -# ), -# " ", -# ) -# end +let header_parts = ( + rpad(styled"{bold:{underline:Complexity}}", 10), + rpad(styled"{bold:{underline:Loss}}", 9), + rpad(styled"{bold:{underline:Score}}", 9), + styled"{bold:{underline:Equation}}", + ) + @eval const HEADER = join($(header_parts), " ") + @eval const HEADER_WITHOUT_SCORE = join($(header_parts[[1, 2, 4]]), " ") +end -_fmt(x::Integer) = @sprintf("%-10d", x) -_fmt(x::AbstractFloat) = @sprintf("%-8.3e", x) -_fmt(x) = rpad(string(x), 12) # fallback +show_score_column(options::AbstractOptions) = options.loss_scale == :log function string_dominating_pareto_curve( 
hallOfFame, dataset, options; width::Union{Integer,Nothing}=nothing, pretty::Bool=true ) terminal_width = (width === nothing) ? 100 : max(100, width::Integer) - formatted = format_hall_of_fame(hallOfFame, options) - stat_cols = collect(propertynames(formatted)) - filter!(c -> c ≠ :trees, stat_cols) - priority = [:complexity, :loss, :score] - stat_cols = vcat(intersect(priority, stat_cols), setdiff(stat_cols, priority)) - header_cells = [ - rpad(styled("{bold:{underline:$(titlecase(string(c)))}}"), 12) for c in stat_cols - ] - push!(header_cells, styled("{bold:{underline:Equation}}")) - header = join(header_cells, " ") - - _buffer = IOBuffer() - buffer = AnnotatedIOBuffer(_buffer) + buffer = AnnotatedIOBuffer(IOBuffer()) println(buffer, '─'^(terminal_width - 1)) - println(buffer, header) - for i in 1:length(formatted.trees) - stats = join((_fmt(getfield(formatted, c)[i]) for c in stat_cols), " ") - print(buffer, stats) - eqn = string_tree( - formatted.trees[i], + if show_score_column(options) + println(buffer, HEADER) + else + println(buffer, HEADER_WITHOUT_SCORE) + end + + formatted = format_hall_of_fame(hallOfFame, options) + for (tree, score, loss, complexity) in + zip(formatted.trees, formatted.scores, formatted.losses, formatted.complexities) + eqn_string = string_tree( + tree, options; display_variable_names=dataset.display_variable_names, X_sym_units=dataset.X_sym_units, y_sym_units=dataset.y_sym_units, pretty, ) - prefix = make_prefix(formatted.trees[i], options, dataset) + prefix = make_prefix(tree, options, dataset) + eqn_string = prefix * eqn_string + stats_columns_string = if show_score_column(options) + @sprintf("%-10d %-8.3e %-8.3e ", complexity, loss, score) + else + @sprintf("%-10d %-8.3e ", complexity, loss) + end + left_cols_width = length(stats_columns_string) + print(buffer, stats_columns_string) print( buffer, wrap_equation_string( - prefix * eqn, length(stats) + length(prefix) + 2, terminal_width + eqn_string, left_cols_width + length(prefix), 
terminal_width ), ) end @@ -191,8 +191,8 @@ end function wrap_equation_string(eqn_string, left_cols_width, terminal_width) dots = "..." equation_width = (terminal_width - 1) - left_cols_width - length(dots) - _buffer = IOBuffer() - buffer = AnnotatedIOBuffer(_buffer) + + buffer = AnnotatedIOBuffer(IOBuffer()) forced_split_eqn = split(eqn_string, '\n') print_pad = false @@ -215,69 +215,65 @@ function wrap_equation_string(eqn_string, left_cols_width, terminal_width) return dump_buffer(buffer) end -@unstable function format_hall_of_fame( - hof::HallOfFame{T,L,N,PM}, - options; - columns::Union{Vector{Symbol},Nothing}=[:losses, :complexities, :scores, :trees], -) where {T,L,N,PM<:PopMember{T,L,N}} +function format_hall_of_fame(hof::HallOfFame{T,L}, options) where {T,L} dominating = calculate_pareto_frontier(hof) - foreach(dominating) do member + + # Only check for negative losses if using logarithmic scaling + options.loss_scale == :log && for member in dominating if member.loss < 0.0 throw( DomainError( member.loss, - "Your loss function must be non-negative. To do this, consider wrapping your loss inside an exponential, which will not affect the search (unless you are using annealing).", + "Your loss function must be non-negative. 
To allow negative losses, set the `loss_scale` to linear, or consider wrapping your loss inside an exponential.", ), ) end end - member_fields = if length(dominating) == 0 - Union{}[] - else - collect(propertynames(first(dominating))) - end - filter!(f -> f != :tree && f != :loss, member_fields) - coldata = Dict{Symbol,Any}() - coldata[:trees] = [member.tree for member in dominating] - coldata[:losses] = [member.loss for member in dominating] + trees = [member.tree for member in dominating] + losses = [member.loss for member in dominating] + complexities = [compute_complexity(member, options) for member in dominating] + scores = Array{L}(undef, length(dominating)) - for f in member_fields - coldata[f] = [getfield(m, f) for m in dominating] - end - coldata[:complexities] = [compute_complexity(m, options) for m in dominating] - ZERO_POINT = eps(L) cur_loss = typemax(L) last_loss = cur_loss - last_complexity = 0 + last_complexity = zero(eltype(complexities)) - coldata[:scores] = Vector{L}(undef, length(dominating)) - for i in eachindex(dominating) - complexity = coldata[:complexities][i] - cur_loss = coldata[:losses][i] + for i in 1:length(dominating) + complexity = complexities[i] + cur_loss = losses[i] delta_c = complexity - last_complexity - delta_l_mse = log(relu(cur_loss / last_loss) + ZERO_POINT) - coldata[:scores][i] = relu(-delta_l_mse / delta_c) - last_loss = cur_loss - last_complexity = complexity - end - # For coldata, only keep the columns that are in `columns` - if columns !== nothing - for c in keys(coldata) - if !(c in columns) - delete!(coldata, c) + scores[i] = if i == 1 + zero(L) + else + if options.loss_scale == :linear + compute_direct_score(cur_loss, last_loss, delta_c) + else + compute_zero_centered_score(cur_loss, last_loss, delta_c) end end + last_loss = cur_loss + last_complexity = complexity end - return NamedTuple(coldata) + return (; trees, scores, losses, complexities) +end +function compute_direct_score(cur_loss, last_loss, delta_c) + 
delta = cur_loss - last_loss
+    return relu(-delta / delta_c)
+end
+function compute_zero_centered_score(cur_loss, last_loss, delta_c)
+    log_ratio = log(relu(cur_loss / last_loss) + eps(cur_loss))
+    return relu(-log_ratio / delta_c)
 end
 
-@unstable function format_hall_of_fame(hof::AbstractVector{<:HallOfFame}, options)
+function format_hall_of_fame(hof::AbstractVector{<:HallOfFame}, options)
     outs = [format_hall_of_fame(h, options) for h in hof]
-    isempty(outs) && return NamedTuple()
-    ks = propertynames(first(outs))
-    vals = map(k -> [getfield(o, k) for o in outs], ks)
-    return NamedTuple{ks}(vals)
+    return (;
+        trees=[out.trees for out in outs],
+        scores=[out.scores for out in outs],
+        losses=[out.losses for out in outs],
+        complexities=[out.complexities for out in outs],
+    )
 end
 
 # TODO: Re-use this in `string_dominating_pareto_curve`
diff --git a/src/InterfaceDataTypes.jl b/src/InterfaceDataTypes.jl
new file mode 100644
index 000000000..37be1afc7
--- /dev/null
+++ b/src/InterfaceDataTypes.jl
@@ -0,0 +1,34 @@
+module InterfaceDataTypesModule
+
+using Random: AbstractRNG
+
+"""
+    init_value(::Type)
+
+Return a zero value, or other trivial initialized value for the given type.
+"""
+init_value(::Type{T}) where {T<:Number} = zero(T)
+function init_value(::Type{T}) where {T}
+    return error("No `init_value` method defined for type $T. Please define one.")
+end
+
+"""
+    sample_value(rng::AbstractRNG, ::Type, options)
+
+Return a random value of the given type.
+"""
+sample_value(rng::AbstractRNG, ::Type{T}, _) where {T<:Number} = randn(rng, T)
+function sample_value(::AbstractRNG, ::Type{T}, _) where {T}
+    return error("No `sample_value` method defined for type $T. Please define one.")
+end
+
+"""
+    mutate_value(rng::AbstractRNG, val, temperature, options)
+
+Return a mutated value of the given type.
+"""
+function mutate_value(::AbstractRNG, ::T, _, _) where {T}
+    return error("No `mutate_value` method defined for type $T. 
Please define one.") +end + +end diff --git a/src/InterfaceDynamicExpressions.jl b/src/InterfaceDynamicExpressions.jl index 22cbf3ad5..5a96addcb 100644 --- a/src/InterfaceDynamicExpressions.jl +++ b/src/InterfaceDynamicExpressions.jl @@ -5,6 +5,7 @@ using DispatchDoctor: @stable using Compat: Fix using DynamicExpressions: DynamicExpressions as DE, + AbstractOperatorEnum, OperatorEnum, GenericOperatorEnum, AbstractExpression, @@ -14,9 +15,13 @@ using DynamicExpressions: EvalOptions using DynamicQuantities: dimension, ustrip using ..CoreModule: AbstractOptions, Dataset -using ..CoreModule.OptionsModule: inverse_binopmap, inverse_unaopmap +using ..CoreModule.OptionsModule: inverse_opmap using ..UtilsModule: subscriptify +takes_eval_options(::Type{<:AbstractOperatorEnum}) = false +takes_eval_options(::Type{<:OperatorEnum}) = true +takes_eval_options(::T) where {T} = takes_eval_options(T) + """ eval_tree_array(tree::Union{AbstractExpression,AbstractExpressionNode}, X::AbstractArray, options::AbstractOptions; kws...) @@ -62,12 +67,18 @@ which speed up evaluation significantly. kws..., ) A = expected_array_type(X, typeof(tree)) - eval_options = EvalOptions(; - turbo=something(turbo, options.turbo), bumper=something(bumper, options.bumper) - ) - out, complete = DE.eval_tree_array( - tree, X, DE.get_operators(tree, options); eval_options, kws... - ) + operators = DE.get_operators(tree, options) + eval_options_kws = if takes_eval_options(operators) + (; + eval_options=EvalOptions(; + turbo=something(turbo, options.turbo), + bumper=something(bumper, options.bumper), + ) + ) + else + NamedTuple() + end + out, complete = DE.eval_tree_array(tree, X, operators; eval_options_kws..., kws...) 
if isnothing(out) return nothing, false else @@ -327,13 +338,17 @@ macro extend_operators(options) $(DE).@extend_operators $alias_operators end |> esc end -function define_alias_operators(operators) +function define_alias_operators( + @nospecialize(operators::Union{OperatorEnum,GenericOperatorEnum}) +) # We undo some of the aliases so that the user doesn't need to use, e.g., # `safe_pow(x1, 1.5)`. They can use `x1 ^ 1.5` instead. constructor = isa(operators, OperatorEnum) ? OperatorEnum : GenericOperatorEnum + @assert operators.ops isa Tuple{Vararg{Any,2}} + # TODO: Support for 3-ary operators return constructor(; - binary_operators=inverse_binopmap.(operators.binops), - unary_operators=inverse_unaopmap.(operators.unaops), + binary_operators=map(inverse_opmap, operators.ops[2]), + unary_operators=map(inverse_opmap, operators.ops[1]), define_helper_functions=false, empty_old_operators=false, ) @@ -364,4 +379,18 @@ end # Allows special handling of class columns in MLJInterface.jl handles_class_column(::Type{<:AbstractExpression}) = false +# These functions allow you to declare functions that must be +# passed to worker nodes explicitly. See TemplateExpressions.jl for +# an example. This is used inside Configure.jl. +# COV_EXCL_START +require_copy_to_workers(::Type{<:AbstractExpression}) = false +function make_example_inputs( + ::Type{<:AbstractExpression}, ::Type{T}, options, dataset +) where {T} + return error( + "`make_example_inputs` is not implemented for `$(typeof(options.expression_type))`." + ) +end +# COV_EXCL_STOP + end diff --git a/src/Logging.jl b/src/Logging.jl index e870f7924..848daf4df 100644 --- a/src/Logging.jl +++ b/src/Logging.jl @@ -134,7 +134,9 @@ function _log_scalars(; out["summaries"] = Dict([ "min_loss" => length(dominating) > 0 ? 
dominating[end].loss : L(Inf), - "pareto_volume" => pareto_volume(losses, complexities, options.maxsize), + "pareto_volume" => pareto_volume( + losses, complexities, options.maxsize, options.loss_scale == :linear + ), ]) #### Full Pareto front @@ -152,22 +154,25 @@ function _log_scalars(; return out end -function pareto_volume(losses::AbstractVector{L}, complexities, maxsize::Int) where {L} +function pareto_volume( + losses::AbstractVector{L}, complexities, maxsize::Int, use_linear_scaling::Bool +) where {L} if length(losses) == 0 return 0.0 end - log_losses = @. log10(losses + eps(L)) - log_complexities = @. log10(complexities) + + y = use_linear_scaling ? copy(losses) : @.(log10(losses + eps(L))) + x = @. log10(complexities) # Add a point equal to the best loss and largest possible complexity, + 1 - push!(log_losses, minimum(log_losses)) - push!(log_complexities, log10(maxsize + 1)) + push!(y, minimum(y)) + push!(x, log10(maxsize + 1)) # Add a point to connect things: - push!(log_losses, maximum(log_losses)) - push!(log_complexities, maximum(log_complexities)) + push!(y, maximum(y)) + push!(x, maximum(x)) - xy = cat(log_complexities, log_losses; dims=2) + xy = cat(x, y; dims=2) hull = convex_hull(xy) return Float64(convex_hull_area(hull)) end diff --git a/src/LossFunctions.jl b/src/LossFunctions.jl index c83e27e14..ea71193cd 100644 --- a/src/LossFunctions.jl +++ b/src/LossFunctions.jl @@ -13,7 +13,8 @@ using ..CoreModule: LOSS_TYPE, is_weighted, get_indices, - get_full_dataset + get_full_dataset, + init_value using ..ComplexityModule: compute_complexity using ..DimensionalAnalysisModule: violates_dimensional_constraints using ..InterfaceDynamicExpressionsModule: expected_array_type @@ -218,7 +219,7 @@ Update the baseline loss of the dataset using the loss function specified in `op function update_baseline_loss!( dataset::Dataset{T,L}, options::AbstractOptions ) where {T<:DATA_TYPE,L<:LOSS_TYPE} - example_tree = create_expression(zero(T), options, dataset) + 
example_tree = create_expression(init_value(T), options, dataset) # constructorof(options.node_type)(T; val=dataset.avg_y) # TODO: It could be that the loss function is not defined for this example type? baseline_loss = eval_loss(example_tree, dataset, options) diff --git a/src/MLJInterface.jl b/src/MLJInterface.jl index cd8656956..2f6e441fc 100644 --- a/src/MLJInterface.jl +++ b/src/MLJInterface.jl @@ -10,6 +10,7 @@ using DynamicExpressions: string_tree, AbstractExpressionNode, AbstractExpression, + AbstractOperatorEnum, Node, Expression, default_node_type, @@ -36,7 +37,8 @@ using ..CoreModule: ComplexityMapping, AbstractExpressionSpec, ExpressionSpec, - get_expression_type + get_expression_type, + check_warm_start_compatibility using ..CoreModule.OptionsModule: DEFAULT_OPTIONS, OPTION_DESCRIPTIONS using ..ComplexityModule: compute_complexity using ..HallOfFameModule: HallOfFame, format_hall_of_fame @@ -68,6 +70,7 @@ function modelexpr( parent_type::Symbol=:AbstractSymbolicRegressor; default_niterations=100, ) + #! format: off struct_def = :(Base.@kwdef mutable struct $(model_name){D<:AbstractDimensions,L} <: $parent_type niterations::Int = $(default_niterations) @@ -76,14 +79,17 @@ function modelexpr( procs::Union{Vector{Int},Nothing} = nothing addprocs_function::Union{Function,Nothing} = nothing heap_size_hint_in_bytes::Union{Integer,Nothing} = nothing + worker_timeout::Union{Real,Nothing} = nothing worker_imports::Union{Vector{Symbol},Nothing} = nothing logger::Union{AbstractSRLogger,Nothing} = nothing runtests::Bool = true run_id::Union{String,Nothing} = nothing loss_type::Type{L} = Nothing + guesses::Union{AbstractVector,AbstractVector{<:AbstractVector},Nothing} = nothing selection_method::Function = choose_best dimensions_type::Type{D} = SymbolicDimensions{DEFAULT_DIM_BASE_TYPE} end) + #! 
format: on # TODO: store `procs` from initial run if parallelism is `:multiprocessing` fields = last(last(struct_def.args).args).args @@ -195,7 +201,12 @@ function full_report( nothing end best_idx = dispatch_selection_for( - m, formatted.trees, formatted.losses, formatted.scores, formatted.complexities + m, + formatted.trees, + formatted.losses, + formatted.scores, + formatted.complexities, + fitresult.options, ) return (; best_idx=best_idx, @@ -222,7 +233,10 @@ function MMI.update( y, w=nothing, ) - options = old_fitresult === nothing ? get_options(m) : old_fitresult.options + options = get_options(m) + if !isnothing(old_fitresult) + check_warm_start_compatibility(old_fitresult.options, options) + end return _update(m, verbosity, old_fitresult, old_cache, X, y, w, options, nothing) end function _update( @@ -291,6 +305,7 @@ function _update( procs=m.procs, addprocs_function=m.addprocs_function, heap_size_hint_in_bytes=m.heap_size_hint_in_bytes, + worker_timeout=m.worker_timeout, worker_imports=m.worker_imports, runtests=m.runtests, saved_state=(old_fitresult === nothing ? nothing : old_fitresult.state), @@ -302,6 +317,7 @@ function _update( verbosity=verbosity, extra=isnothing(class) ? (;) : (; class), logger=m.logger, + guesses=m.guesses, # Help out with inference: v_dim_out=isa(m, AbstractSingletargetSRRegressor) ? 
Val(1) : Val(2), ) @@ -315,7 +331,7 @@ function _update( variable_names=variable_names, y_variable_names=y_variable_names, y_is_table=MMI.istable(y), - has_class=!isnothing(class), + has_class=(!isnothing(class)), X_units=X_units_clean, y_units=y_units_clean, types=SRFitResultTypes(; @@ -592,11 +608,17 @@ function get_equation_strings_for( ] end -function choose_best(; trees, losses::Vector{L}, scores, complexities) where {L<:LOSS_TYPE} +function choose_best(; + trees, losses::Vector{L}, scores, complexities, options=nothing +) where {L<:LOSS_TYPE} # Same as in PySR: # https://github.com/MilesCranmer/PySR/blob/e74b8ad46b163c799908b3aa4d851cf8457c79ef/pysr/sr.py#L2318-L2332 # threshold = 1.5 * minimum_loss # Then, we get max score of those below the threshold. + if !isnothing(options) && options.loss_scale == :linear + return argmin(losses) + end + threshold = 1.5 * minimum(losses) return argmax([ (losses[i] <= threshold) ? scores[i] : typemin(L) for i in eachindex(losses) @@ -604,20 +626,22 @@ function choose_best(; trees, losses::Vector{L}, scores, complexities) where {L< end function dispatch_selection_for( - m::AbstractSingletargetSRRegressor, trees, losses, scores, complexities + m::AbstractSingletargetSRRegressor, trees, losses, scores, complexities, options )::Int length(trees) == 0 && return 0 - return m.selection_method(; - trees=trees, losses=losses, scores=scores, complexities=complexities - ) + return m.selection_method(; trees, losses, scores, complexities, options) end function dispatch_selection_for( - m::AbstractMultitargetSRRegressor, trees, losses, scores, complexities + m::AbstractMultitargetSRRegressor, trees, losses, scores, complexities, options ) any(t -> length(t) == 0, trees) && return fill(0, length(trees)) return [ m.selection_method(; - trees=trees[i], losses=losses[i], scores=scores[i], complexities=complexities[i] + trees=trees[i], + losses=losses[i], + scores=scores[i], + complexities=complexities[i], + options, ) for i in 
eachindex(trees) ] end @@ -647,7 +671,7 @@ for model in [:SRRegressor, :SRTestRegressor] target_scitype=AbstractVector{<:MMI.Continuous}, supports_weights=true, reports_feature_importances=false, - load_path=$("SymbolicRegression.MLJInterfaceModule." * string(model)), + load_path=($("SymbolicRegression.MLJInterfaceModule." * string(model))), human_name="Symbolic Regression via Evolutionary Search", ) end @@ -662,7 +686,7 @@ for model in [:MultitargetSRRegressor, :MultitargetSRTestRegressor] }, supports_weights=true, reports_feature_importances=false, - load_path=$("SymbolicRegression.MLJInterfaceModule." * string(model)), + load_path=($("SymbolicRegression.MLJInterfaceModule." * string(model))), human_name="Multi-Target Symbolic Regression via Evolutionary Search", ) end @@ -893,8 +917,12 @@ eval( replace( """ Multi-target Symbolic Regression regressor (`MultitargetSRRegressor`) - conducts several searches for expressions that predict each target variable - from a set of input variables. All data is assumed to be `Continuous`. + searches for expressions that predict each target variable from a set + of input variables. This simply runs independent [`SRRegressor`](@ref) + searches for each target column in parallel - there is no joint modeling + of targets. All configuration options work identically to `SRRegressor`. + + All data is assumed to be `Continuous`. The search is performed using an evolutionary algorithm. This algorithm is described in the paper https://arxiv.org/abs/2305.01582. 
diff --git a/src/Mutate.jl b/src/Mutate.jl index 639414206..480d550ed 100644 --- a/src/Mutate.jl +++ b/src/Mutate.jl @@ -26,6 +26,7 @@ using ..PopMemberModule: AbstractPopMember, PopMember using ..MutationFunctionsModule: mutate_constant, mutate_operator, + mutate_feature, swap_operands, append_random_op, prepend_random_op, @@ -83,7 +84,7 @@ struct MutationResult{N<:AbstractExpression,P<:AbstractPopMember} <: end """ - condition_mutation_weights!(weights::AbstractMutationWeights, member::PopMember, options::AbstractOptions, curmaxsize::Int) + condition_mutation_weights!(weights::AbstractMutationWeights, member::PopMember, options::AbstractOptions, curmaxsize::Int, nfeatures::Int) Adjusts the mutation weights based on the properties of the current member and options. @@ -96,9 +97,14 @@ Note that the weights were already copied, so you don't need to worry about muta - `member::PopMember`: The current population member being mutated. - `options::AbstractOptions`: The options that guide the mutation process. - `curmaxsize::Int`: The current maximum size constraint for the member's expression tree. +- `nfeatures::Int`: The number of features available in the dataset. 
""" function condition_mutation_weights!( - weights::AbstractMutationWeights, member::P, options::AbstractOptions, curmaxsize::Int + weights::AbstractMutationWeights, + member::P, + options::AbstractOptions, + curmaxsize::Int, + nfeatures::Int, ) where {T,L,N<:AbstractExpression,P<:AbstractPopMember{T,L,N}} tree = get_tree(member.tree) if !preserve_sharing(typeof(member.tree)) @@ -115,6 +121,8 @@ function condition_mutation_weights!( if !tree.constant weights.optimize = 0.0 weights.mutate_constant = 0.0 + else + weights.mutate_feature = 0.0 end return nothing end @@ -126,6 +134,11 @@ function condition_mutation_weights!( condition_mutate_constant!(typeof(member.tree), weights, member, options, curmaxsize) + # Disable feature mutation if only one feature available + if nfeatures <= 1 + weights.mutate_feature = 0.0 + end + complexity = compute_complexity(member, options) if complexity >= curmaxsize @@ -180,7 +193,7 @@ end weights = copy(options.mutation_weights) - condition_mutation_weights!(weights, member, options, curmaxsize) + condition_mutation_weights!(weights, member, options, curmaxsize, nfeatures) mutation_choice = sample_mutation(weights) @@ -281,6 +294,7 @@ end probChange = 1.0 if options.annealing + # TODO: Try using log(after_cost) - log(before_cost) here delta = after_cost - before_cost probChange *= exp(-delta / (temperature * options.alpha)) end @@ -434,6 +448,21 @@ function mutate!( return MutationResult{N,P}(; tree=tree) end +function mutate!( + tree::N, + member::P, + ::Val{:mutate_feature}, + ::AbstractMutationWeights, + options::AbstractOptions; + recorder::RecordType, + nfeatures, + kws..., +) where {N<:AbstractExpression,P<:AbstractPopMember} + tree = mutate_feature(tree, nfeatures) + @recorder recorder["type"] = "mutate_feature" + return MutationResult{N,P}(; tree=tree) +end + function mutate!( tree::N, member::P, @@ -490,10 +519,9 @@ function mutate!( ::AbstractMutationWeights, options::AbstractOptions; recorder::RecordType, - nfeatures, 
kws..., ) where {N<:AbstractExpression,P<:AbstractPopMember} - tree = delete_random_op!(tree, options, nfeatures) + tree = delete_random_op!(tree) @recorder recorder["type"] = "delete_node" return MutationResult{N,P}(; tree=tree) end diff --git a/src/MutationFunctions.jl b/src/MutationFunctions.jl index 4f9111b5f..3f46c879b 100644 --- a/src/MutationFunctions.jl +++ b/src/MutationFunctions.jl @@ -12,8 +12,13 @@ using DynamicExpressions: set_node!, count_nodes, has_constants, - has_operators -using ..CoreModule: AbstractOptions, DATA_TYPE + has_operators, + get_child, + set_child!, + max_degree +using ..CoreModule: AbstractOptions, DATA_TYPE, init_value, sample_value + +import ..CoreModule: mutate_value """ get_contents_for_mutation(ex::AbstractExpression, rng::AbstractRNG) @@ -40,6 +45,19 @@ function with_contents_for_mutation(ex::AbstractExpression, new_contents, ::Noth return with_contents(ex, new_contents) end +""" + get_nfeatures_for_mutation(ex::AbstractExpression, context, nfeatures::Int) + +Return the number of features available for mutation in the given expression. +You can overload this function for custom expression types that need +context-specific feature ranges (e.g., TemplateExpression subexpressions). + +The default implementation simply returns the global `nfeatures`. 
+""" +function get_nfeatures_for_mutation(::AbstractExpression, context, nfeatures::Int) + return nfeatures +end + """ random_node(tree::AbstractNode; filter::F=Returns(true)) @@ -56,18 +74,24 @@ function random_node( return rand(rng, NodeSampler(; tree, filter)) end -"""Swap operands in binary operator for ops like pow and divide""" +"""Swap operands in operators""" function swap_operands(ex::AbstractExpression, rng::AbstractRNG=default_rng()) tree, context = get_contents_for_mutation(ex, rng) ex = with_contents_for_mutation(ex, swap_operands(tree, rng), context) return ex end function swap_operands(tree::AbstractNode, rng::AbstractRNG=default_rng()) - if !any(node -> node.degree == 2, tree) + if !any(node -> node.degree > 1, tree) return tree end - node = rand(rng, NodeSampler(; tree, filter=t -> t.degree == 2)) - node.l, node.r = node.r, node.l + node = rand(rng, NodeSampler(; tree, filter=t -> t.degree > 1)) + deg = node.degree + i1 = rand(rng, 1:deg) + i2 = deg == 2 ? (i1 == 1 ? 2 : 1) : rand(rng, filter(!=(i1), 1:deg)) + n1 = get_child(node, i1) + n2 = get_child(node, i2) + set_child!(node, n2, i1) + set_child!(node, n1, i2) return tree end @@ -80,19 +104,13 @@ function mutate_operator( return ex end function mutate_operator( - tree::AbstractExpressionNode{T}, - options::AbstractOptions, - rng::AbstractRNG=default_rng(), -) where {T} + tree::AbstractExpressionNode, options::AbstractOptions, rng::AbstractRNG=default_rng() +) if !(has_operators(tree)) return tree end node = rand(rng, NodeSampler(; tree, filter=t -> t.degree != 0)) - if node.degree == 1 - node.op = rand(rng, 1:(options.nuna)) - else - node.op = rand(rng, 1:(options.nbin)) - end + node.op = rand(rng, 1:(options.nops[node.degree])) return tree end @@ -121,13 +139,15 @@ function mutate_constant( return tree end node = rand(rng, NodeSampler(; tree, filter=t -> (t.degree == 0 && t.constant))) - - node.val *= mutate_factor(T, temperature, options, rng) - + node.val = mutate_value(rng, node.val, 
temperature, options) return tree end -function mutate_factor(::Type{T}, temperature, options, rng) where {T<:DATA_TYPE} +function mutate_value(rng::AbstractRNG, val::Number, temperature, options) + return val * mutate_factor(typeof(val), temperature, options, rng) +end + +function mutate_factor(::Type{T}, temperature, options, rng) where {T<:Number} bottom = 1//10 maxChange = options.perturbation_factor * temperature + 1 + bottom factor = T(maxChange^rand(rng, T)) @@ -141,52 +161,70 @@ function mutate_factor(::Type{T}, temperature, options, rng) where {T<:DATA_TYPE return factor end -# TODO: Shouldn't we add a mutate_feature here? +"""Randomly change which feature a variable node points to""" +function mutate_feature( + ex::AbstractExpression{T}, nfeatures::Int, rng::AbstractRNG=default_rng() +) where {T<:DATA_TYPE} + tree, context = get_contents_for_mutation(ex, rng) + local_nfeatures = get_nfeatures_for_mutation(ex, context, nfeatures) + ex = with_contents_for_mutation(ex, mutate_feature(tree, local_nfeatures, rng), context) + return ex +end +function mutate_feature( + tree::AbstractExpressionNode{T}, nfeatures::Int, rng::AbstractRNG=default_rng() +) where {T<:DATA_TYPE} + # Quick checks for if there is nothing to do + nfeatures <= 1 && return tree + !any(node -> node.degree == 0 && !node.constant, tree) && return tree + + node = rand(rng, NodeSampler(; tree, filter=t -> (t.degree == 0 && !t.constant))) + node.feature = rand(rng, filter(!=(node.feature), 1:nfeatures)) + return tree +end """Add a random unary/binary operation to the end of a tree""" function append_random_op( ex::AbstractExpression{T}, options::AbstractOptions, nfeatures::Int, - rng::AbstractRNG=default_rng(); - make_new_bin_op::Union{Bool,Nothing}=nothing, + rng::AbstractRNG=default_rng(), ) where {T<:DATA_TYPE} tree, context = get_contents_for_mutation(ex, rng) + local_nfeatures = get_nfeatures_for_mutation(ex, context, nfeatures) ex = with_contents_for_mutation( - ex, append_random_op(tree, 
options, nfeatures, rng; make_new_bin_op), context + ex, append_random_op(tree, options, local_nfeatures, rng), context ) return ex end -function append_random_op( - tree::AbstractExpressionNode{T}, +@generated function append_random_op( + tree::AbstractExpressionNode{T,D}, options::AbstractOptions, nfeatures::Int, - rng::AbstractRNG=default_rng(); - make_new_bin_op::Union{Bool,Nothing}=nothing, -) where {T<:DATA_TYPE} - node = rand(rng, NodeSampler(; tree, filter=t -> t.degree == 0)) - - _make_new_bin_op = @something( - make_new_bin_op, rand(rng) < options.nbin / (options.nuna + options.nbin), - ) - - if _make_new_bin_op - newnode = constructorof(typeof(tree))(; - op=rand(rng, 1:(options.nbin)), - l=make_random_leaf(nfeatures, T, typeof(tree), rng, options), - r=make_random_leaf(nfeatures, T, typeof(tree), rng, options), - ) - else - newnode = constructorof(typeof(tree))(; - op=rand(rng, 1:(options.nuna)), - l=make_random_leaf(nfeatures, T, typeof(tree), rng, options), + rng::AbstractRNG=default_rng(), +) where {T<:DATA_TYPE,D} + quote + node = _sample_leaf(rng, tree) + + @assert D == length(options.nops) + csum = (0, cumsum(options.nops)...) 
+ scaled_rand = rand(rng) * last(csum) + # COV_EXCL_START + newnode = Base.Cartesian.@nif( + $D, + i -> scaled_rand > csum[i] && scaled_rand <= csum[i + 1], + i -> constructorof(typeof(tree))(; + op=rand(rng, 1:(options.nops[i])), + children=Base.Cartesian.@ntuple( + i, j -> make_random_leaf(nfeatures, T, typeof(tree), rng, options) + ) + ) ) + # COV_EXCL_STOP + set_node!(node, newnode) + return tree end - - set_node!(node, newnode) - - return tree end +_sample_leaf(rng, tree) = rand(rng, NodeSampler(; tree, filter=t -> t.degree == 0)) """Insert random node""" function insert_random_op( @@ -196,32 +234,41 @@ function insert_random_op( rng::AbstractRNG=default_rng(), ) where {T<:DATA_TYPE} tree, context = get_contents_for_mutation(ex, rng) + local_nfeatures = get_nfeatures_for_mutation(ex, context, nfeatures) ex = with_contents_for_mutation( - ex, insert_random_op(tree, options, nfeatures, rng), context + ex, insert_random_op(tree, options, local_nfeatures, rng), context ) return ex end -function insert_random_op( - tree::AbstractExpressionNode{T}, +@generated function insert_random_op( + tree::AbstractExpressionNode{T,D}, options::AbstractOptions, nfeatures::Int, rng::AbstractRNG=default_rng(), -) where {T<:DATA_TYPE} - node = rand(rng, NodeSampler(; tree)) - choice = rand(rng) - make_new_bin_op = choice < options.nbin / (options.nuna + options.nbin) - left = copy(node) - - if make_new_bin_op - right = make_random_leaf(nfeatures, T, typeof(tree), rng, options) - newnode = constructorof(typeof(tree))(; - op=rand(rng, 1:(options.nbin)), l=left, r=right +) where {T<:DATA_TYPE,D} + quote + node = rand(rng, NodeSampler(; tree)) + csum = (0, cumsum(options.nops)...) 
+ scaled_rand = rand(rng) * last(csum) + newnode = Base.Cartesian.@nif( + $D, + i -> scaled_rand > csum[i] && scaled_rand <= csum[i + 1], # COV_EXCL_LINE + i -> let # COV_EXCL_LINE + arg_to_carry = rand(rng, 1:i) + children = Base.Cartesian.@ntuple( + i, + j -> if j == arg_to_carry # COV_EXCL_LINE + copy(node) + else + make_random_leaf(nfeatures, T, typeof(tree), rng, options) + end + ) + constructorof(typeof(tree))(; op=rand(rng, 1:(options.nops[i])), children) + end ) - else - newnode = constructorof(typeof(tree))(; op=rand(rng, 1:(options.nuna)), l=left) + set_node!(node, newnode) + return tree end - set_node!(node, newnode) - return tree end """Add random node to the top of a tree""" @@ -232,32 +279,43 @@ function prepend_random_op( rng::AbstractRNG=default_rng(), ) where {T<:DATA_TYPE} tree, context = get_contents_for_mutation(ex, rng) + local_nfeatures = get_nfeatures_for_mutation(ex, context, nfeatures) ex = with_contents_for_mutation( - ex, prepend_random_op(tree, options, nfeatures, rng), context + ex, prepend_random_op(tree, options, local_nfeatures, rng), context ) return ex end -function prepend_random_op( - tree::AbstractExpressionNode{T}, + +@generated function prepend_random_op( + tree::AbstractExpressionNode{T,D}, options::AbstractOptions, nfeatures::Int, rng::AbstractRNG=default_rng(), -) where {T<:DATA_TYPE} - node = tree - choice = rand(rng) - make_new_bin_op = choice < options.nbin / (options.nuna + options.nbin) - left = copy(tree) - - if make_new_bin_op - right = make_random_leaf(nfeatures, T, typeof(tree), rng, options) - newnode = constructorof(typeof(tree))(; - op=rand(rng, 1:(options.nbin)), l=left, r=right +) where {T<:DATA_TYPE,D} + quote + @assert D == length(options.nops) + csum = (0, cumsum(options.nops)...) 
+ scaled_rand = rand(rng) * last(csum) + + newroot = Base.Cartesian.@nif( + $D, + i -> scaled_rand > csum[i] && scaled_rand <= csum[i + 1], # COV_EXCL_LINE + i -> let # COV_EXCL_LINE + carry = rand(rng, 1:i) + children = Base.Cartesian.@ntuple( + i, + j -> if j == carry # COV_EXCL_LINE + tree + else + make_random_leaf(nfeatures, T, typeof(tree), rng, options) + end + ) + constructorof(typeof(tree))(; op=rand(rng, 1:options.nops[i]), children) + end, ) - else - newnode = constructorof(typeof(tree))(; op=rand(rng, 1:(options.nuna)), l=left) + + return newroot end - set_node!(node, newnode) - return node end function make_random_leaf( @@ -265,84 +323,36 @@ function make_random_leaf( ::Type{T}, ::Type{N}, rng::AbstractRNG=default_rng(), - ::Union{AbstractOptions,Nothing}=nothing, + options::Union{AbstractOptions,Nothing}=nothing, ) where {T<:DATA_TYPE,N<:AbstractExpressionNode} if rand(rng, Bool) - return constructorof(N)(T; val=randn(rng, T)) + return constructorof(N)(T; val=sample_value(rng, T, options)) else return constructorof(N)(T; feature=rand(rng, 1:nfeatures)) end end -"""Return a random node from the tree with parent, and side ('n' for no parent)""" -function random_node_and_parent(tree::AbstractNode, rng::AbstractRNG=default_rng()) - if tree.degree == 0 - return tree, tree, 'n' - end - parent = rand(rng, NodeSampler(; tree, filter=t -> t.degree != 0)) - if parent.degree == 1 || rand(rng, Bool) - return (parent.l, parent, 'l') - else - return (parent.r, parent, 'r') - end -end - """Select a random node, and splice it out of the tree.""" -function delete_random_op!( - ex::AbstractExpression{T}, - options::AbstractOptions, - nfeatures::Int, - rng::AbstractRNG=default_rng(), -) where {T<:DATA_TYPE} - tree, context = get_contents_for_mutation(ex, rng) - ex = with_contents_for_mutation( - ex, delete_random_op!(tree, options, nfeatures, rng), context - ) - return ex +function delete_random_op!(ex::AbstractExpression, rng::AbstractRNG=default_rng()) + tree, ctx = 
get_contents_for_mutation(ex, rng) + newtree = delete_random_op!(tree, rng) + return with_contents_for_mutation(ex, newtree, ctx) end -function delete_random_op!( - tree::AbstractExpressionNode{T}, - options::AbstractOptions, - nfeatures::Int, - rng::AbstractRNG=default_rng(), -) where {T<:DATA_TYPE} - node, parent, side = random_node_and_parent(tree, rng) - isroot = side == 'n' - if node.degree == 0 - # Replace with new constant - newnode = make_random_leaf(nfeatures, T, typeof(tree), rng, options) - set_node!(node, newnode) - elseif node.degree == 1 - # Join one of the children with the parent - if isroot - return node.l - elseif parent.l == node - parent.l = node.l - else - parent.r = node.l - end +function delete_random_op!(tree::AbstractExpressionNode, rng::AbstractRNG=default_rng()) + tree.degree == 0 && return tree + + node = rand(rng, NodeSampler(; tree, filter=t -> t.degree > 0)) + carry_idx = rand(rng, 1:(node.degree)) + carry = get_child(node, carry_idx) + + if node === tree + return carry else - # Join one of the children with the parent - if rand(rng, Bool) - if isroot - return node.l - elseif parent.l == node - parent.l = node.l - else - parent.r = node.l - end - else - if isroot - return node.r - elseif parent.l == node - parent.l = node.r - else - parent.r = node.r - end - end + parent, idx = _find_parent(tree, node) + set_child!(parent, carry, idx) + return tree end - return tree end function randomize_tree( @@ -353,8 +363,9 @@ function randomize_tree( rng::AbstractRNG=default_rng(), ) tree, context = get_contents_for_mutation(ex, rng) + local_nfeatures = get_nfeatures_for_mutation(ex, context, nfeatures) ex = with_contents_for_mutation( - ex, randomize_tree(tree, curmaxsize, options, nfeatures, rng), context + ex, randomize_tree(tree, curmaxsize, options, local_nfeatures, rng), context ) return ex end @@ -378,7 +389,7 @@ function gen_random_tree( rng::AbstractRNG=default_rng(), ) where {T<:DATA_TYPE} # Note that this base tree is just a 
placeholder; it will be replaced. - tree = constructorof(options.node_type)(T; val=convert(T, 1)) + tree = constructorof(options.node_type)(T; val=init_value(T)) for i in 1:length # TODO: This can be larger number of nodes than length. tree = append_random_op(tree, options, nfeatures, rng) @@ -386,6 +397,47 @@ function gen_random_tree( return tree end +@generated function _make_node( + arity::Int, + proto::AbstractExpressionNode{<:Any,D}, + nfeatures::Int, + ::Type{T}, + options::AbstractOptions, + rng::AbstractRNG, +) where {T,D} + quote + # COV_EXCL_START + Base.Cartesian.@nif( + $D, + i -> arity == i, + i -> constructorof(typeof(proto))(; + op=rand(rng, 1:options.nops[i]), + children=Base.Cartesian.@ntuple( + i, j -> make_random_leaf(nfeatures, T, typeof(proto), rng, options), + ), + ), + ) + # COV_EXCL_STOP + end +end + +function _arity_picker(rng::AbstractRNG, remaining::Int, nops::NTuple{D,Int}) where {D} + total = 0 + limit = min(D, remaining) + for k in 1:limit + total += @inbounds nops[k] + end + total == 0 && return 0 + + thresh = rand(rng, 1:total) + acc = 0 + for k in 1:(limit - 1) + acc += @inbounds nops[k] + thresh <= acc && return k + end + return limit +end + function gen_random_tree_fixed_size( node_count::Int, options::AbstractOptions, @@ -393,17 +445,28 @@ function gen_random_tree_fixed_size( ::Type{T}, rng::AbstractRNG=default_rng(), ) where {T<:DATA_TYPE} + # (1) start with a single leaf tree = make_random_leaf(nfeatures, T, options.node_type, rng, options) - cur_size = count_nodes(tree) - while cur_size < node_count - if cur_size == node_count - 1 # only unary operator allowed. - options.nuna == 0 && break # We will go over the requested amount, so we must break. 
- tree = append_random_op(tree, options, nfeatures, rng; make_new_bin_op=false) - else - tree = append_random_op(tree, options, nfeatures, rng) - end - cur_size = count_nodes(tree) + cur_size = 1 + + # (2) grow the tree + while true + remaining = node_count - cur_size + remaining == 0 && break + + arity = _arity_picker(rng, remaining, options.nops) + arity == 0 && break + + # choose a random leaf to expand + leaf = rand(rng, NodeSampler(; tree, filter=t -> t.degree == 0)) + + # make a new operator node of that arity + newnode = _make_node(arity, leaf, nfeatures, T, options, rng) + + set_node!(leaf, newnode) + cur_size += arity end + return tree end @@ -424,38 +487,34 @@ end """Crossover between two expressions""" function crossover_trees( tree1::N, tree2::N, rng::AbstractRNG=default_rng() -) where {T,N<:AbstractExpressionNode{T}} - if tree1 === tree2 - error("Attempted to crossover the same tree!") - end - tree1 = copy(tree1) - tree2 = copy(tree2) - - node1, parent1, side1 = random_node_and_parent(tree1, rng) - node2, parent2, side2 = random_node_and_parent(tree2, rng) - - node1 = copy(node1) - - if side1 == 'l' - parent1.l = copy(node2) - # tree1 now contains this. - elseif side1 == 'r' - parent1.r = copy(node2) - # tree1 now contains this. - else # 'n' - # This means that there is no parent2. 
- tree1 = copy(node2) +) where {N<:AbstractExpressionNode} + tree1 === tree2 && error("Attempted to crossover the same tree!") + + # copy whole trees so original expressions remain unchanged + t1 = copy(tree1) + t2 = copy(tree2) + + # pick random nodes (and parents) in each tree + n1, p1, i1 = _random_node_and_parent(t1, rng) + n2, p2, i2 = _random_node_and_parent(t2, rng) + + n1 = copy(n1) + + # splice n2 into t1 + if i1 == 0 + t1 = copy(n2) + else + set_child!(p1, copy(n2), i1) end - if side2 == 'l' - parent2.l = node1 - elseif side2 == 'r' - parent2.r = node1 - else # 'n' - tree2 = node1 + # splice n1 into t2 + if i2 == 0 + t2 = n1 + else + set_child!(p2, n1, i2) end - return tree1, tree2 + return t1, t2 end function get_two_nodes_without_loop(tree::AbstractNode, rng::AbstractRNG; max_attempts=10) @@ -486,12 +545,8 @@ function form_random_connection!(tree::AbstractNode, rng::AbstractRNG=default_rn return tree end - # Set one of the children to be this new child: - if parent.degree == 1 || rand(rng, Bool) - parent.l = new_child - else - parent.r = new_child - end + i = rand(rng, 1:(parent.degree)) + set_child!(parent, new_child, i) return tree end @@ -502,16 +557,42 @@ end function break_random_connection!(tree::AbstractNode, rng::AbstractRNG=default_rng()) tree.degree == 0 && return tree parent = rand(rng, NodeSampler(; tree, filter=t -> t.degree != 0)) - if parent.degree == 1 || rand(rng, Bool) - parent.l = copy(parent.l) + i = rand(rng, 1:(parent.degree)) + set_child!(parent, copy(get_child(parent, i)), i) + return tree +end + +"""Return (node, parent, idx) where `idx == 0` iff node is the root.""" +function _random_node_and_parent( + tree::AbstractExpressionNode, rng::AbstractRNG=default_rng() +) + node = rand(rng, NodeSampler(; tree)) + if node === tree + return node, node, 0 else - parent.r = copy(parent.r) + parent, idx = _find_parent(tree, node) + return node, parent, idx end - return tree +end +function _find_parent(tree::N, node::N) where 
{N<:AbstractNode} + r = Ref{Tuple{typeof(tree),Int}}() + finished = any(tree) do t + if t.degree > 0 + for i in 1:(t.degree) + if get_child(t, i) === node + r[] = (t, i) + return true + end + end + end + return false + end + @assert finished + return r[] end -function is_valid_rotation_node(node::AbstractNode) - return (node.degree > 0 && node.l.degree > 0) || (node.degree == 2 && node.r.degree > 0) +function _valid_rotation_root(tree::AbstractNode) + return tree.degree > 0 && any(i -> get_child(tree, i).degree > 0, 1:(tree.degree)) end function randomly_rotate_tree!(ex::AbstractExpression, rng::AbstractRNG=default_rng()) @@ -519,101 +600,36 @@ function randomly_rotate_tree!(ex::AbstractExpression, rng::AbstractRNG=default_ rotated_tree = randomly_rotate_tree!(tree, rng) return with_contents_for_mutation(ex, rotated_tree, context) end -function randomly_rotate_tree!(tree::AbstractNode, rng::AbstractRNG=default_rng()) - num_rotation_nodes = count(is_valid_rotation_node, tree) - - # Return the tree if no valid nodes are found - if num_rotation_nodes == 0 +function randomly_rotate_tree!(tree::AbstractExpressionNode, rng::AbstractRNG=default_rng()) + num_valid_rotation_roots = count(_valid_rotation_root, tree) + if num_valid_rotation_roots == 0 return tree end - - root_is_valid_rotation_node = is_valid_rotation_node(tree) - - # Now, we decide if we want to rotate at the root, or at a random node - rotate_at_root = root_is_valid_rotation_node && rand(rng) < 1.0 / num_rotation_nodes - - subtree_parent = if rotate_at_root - tree + rotate_at_root = rand(rng) < 1 / num_valid_rotation_roots + (parent, root_idx, root) = if rotate_at_root + (tree, 0, tree) else - rand( - rng, - NodeSampler(; - tree, - filter=t -> ( - (t.degree > 0 && is_valid_rotation_node(t.l)) || - (t.degree == 2 && is_valid_rotation_node(t.r)) - ), - ), + _root = rand( + rng, NodeSampler(; tree, filter=t -> t !== tree && _valid_rotation_root(t)) ) - end + _parent, _root_idx = _find_parent(tree, _root) - 
subtree_side = if rotate_at_root - :n - elseif subtree_parent.degree == 1 - :l - else - if is_valid_rotation_node(subtree_parent.l) && - (!is_valid_rotation_node(subtree_parent.r) || rand(rng, Bool)) - :l - else - :r - end + (_parent, _root_idx, _root) end - subtree_root = if rotate_at_root - tree - elseif subtree_side == :l - subtree_parent.l - else - subtree_parent.r - end + pivot_idx = rand(rng, [i for i in 1:(root.degree) if get_child(root, i).degree > 0]) + pivot = get_child(root, pivot_idx) + grand_child_idx = rand(rng, 1:(pivot.degree)) + grand_child = get_child(pivot, grand_child_idx) + set_child!(root, grand_child, pivot_idx) + set_child!(pivot, root, grand_child_idx) - # Perform the rotation - # (reference: https://web.archive.org/web/20230326202118/https://upload.wikimedia.org/wikipedia/commons/1/15/Tree_Rotations.gif) - right_rotation_valid = subtree_root.l.degree > 0 - left_rotation_valid = subtree_root.degree == 2 && subtree_root.r.degree > 0 - - right_rotation = right_rotation_valid && (!left_rotation_valid || rand(rng, Bool)) - if right_rotation - node_5 = subtree_root - node_3 = leftmost(node_5) - node_4 = rightmost(node_3) - - set_leftmost!(node_5, node_4) - set_rightmost!(node_3, node_5) - if rotate_at_root - return node_3 # new root - elseif subtree_side == :l - subtree_parent.l = node_3 - else - subtree_parent.r = node_3 - end - else # left rotation - node_3 = subtree_root - node_5 = rightmost(node_3) - node_4 = leftmost(node_5) - - set_rightmost!(node_3, node_4) - set_leftmost!(node_5, node_3) - if rotate_at_root - return node_5 # new root - elseif subtree_side == :l - subtree_parent.l = node_5 - else - subtree_parent.r = node_5 - end + if rotate_at_root + return pivot + else + set_child!(parent, pivot, root_idx) + return tree end - - return tree end -#! format: off -# These functions provide an easier way to work with unary nodes, by -# simply letting `.r` fall back to `.l` if the node is a unary operator. 
-leftmost(node::AbstractNode) = node.l -rightmost(node::AbstractNode) = node.degree == 1 ? node.l : node.r -set_leftmost!(node::AbstractNode, l::AbstractNode) = (node.l = l) -set_rightmost!(node::AbstractNode, r::AbstractNode) = node.degree == 1 ? (node.l = r) : (node.r = r) -#! format: on - end diff --git a/src/MutationWeights.jl b/src/MutationWeights.jl index 5b6253cec..b18027ae3 100644 --- a/src/MutationWeights.jl +++ b/src/MutationWeights.jl @@ -77,6 +77,7 @@ will be normalized to sum to 1.0 after initialization. - `mutate_constant::Float64`: How often to mutate a constant. - `mutate_operator::Float64`: How often to mutate an operator. +- `mutate_feature::Float64`: How often to mutate which feature a variable node references. - `swap_operands::Float64`: How often to swap the operands of a binary operator. - `rotate_tree::Float64`: How often to perform a tree rotation at a random node. - `add_node::Float64`: How often to append a node to the tree. @@ -102,6 +103,7 @@ will be normalized to sum to 1.0 after initialization. Base.@kwdef mutable struct MutationWeights <: AbstractMutationWeights mutate_constant::Float64 = 0.0353 mutate_operator::Float64 = 3.63 + mutate_feature::Float64 = 0.1 swap_operands::Float64 = 0.00608 rotate_tree::Float64 = 1.42 add_node::Float64 = 0.0771 diff --git a/src/Operators.jl b/src/Operators.jl index 5d9bd678d..d26484da1 100644 --- a/src/Operators.jl +++ b/src/Operators.jl @@ -11,11 +11,10 @@ using ...UtilsModule: @ignore #TODO - actually add these operators to the module! # TODO: Should this be limited to AbstractFloat instead? -function gamma(x::T)::T where {T<:DATA_TYPE} +function gamma(x::T)::T where {T<:Number} out = SpecialFunctions.gamma(x) return isinf(out) ? 
T(NaN) : out end -gamma(x) = SpecialFunctions.gamma(x) atanh_clip(x) = atanh(mod(x + oneunit(x), oneunit(x) + oneunit(x)) - oneunit(x)) * one(x) # == atanh((x + 1) % 2 - 1) diff --git a/src/Options.jl b/src/Options.jl index af201ed53..cde0d5a31 100644 --- a/src/Options.jl +++ b/src/Options.jl @@ -3,7 +3,13 @@ module OptionsModule using DispatchDoctor: @unstable using Optim: Optim using DynamicExpressions: - OperatorEnum, Expression, default_node_type, AbstractExpression, AbstractExpressionNode + OperatorEnum, + AbstractOperatorEnum, + Expression, + default_node_type, + AbstractExpression, + AbstractExpressionNode +using DynamicExpressions.NodeModule: has_max_degree, with_max_degree using ADTypes: AbstractADType, ADTypes using LossFunctions: L2DistLoss, SupervisedLoss using Optim: Optim @@ -43,67 +49,70 @@ using ..ExpressionSpecModule: """Build constraints on operator-level complexity from a user-passed dict.""" @unstable function build_constraints(; - una_constraints, - bin_constraints, - @nospecialize(unary_operators), - @nospecialize(binary_operators) -)::Tuple{Vector{Int},Vector{Tuple{Int,Int}}} - # Expect format ((*)=>(-1, 3)), etc. - # TODO: Need to disable simplification if (*, -, +, /) are constrained? - # Or, just quit simplification is constraints violated. 
- - is_una_constraints_already_done = una_constraints isa Vector{Int} - _una_constraints1 = if una_constraints isa Array && !is_una_constraints_already_done - Dict(una_constraints) + constraints=nothing, + una_constraints=nothing, + bin_constraints=nothing, + @nospecialize(operators_by_degree::Tuple{Vararg{Any,D}}) +) where {D} + constraints = if constraints !== nothing + @assert all(isnothing, (una_constraints, bin_constraints)) + constraints + elseif any(!isnothing, (una_constraints, bin_constraints)) + (una_constraints, bin_constraints) else - una_constraints + ntuple(i -> nothing, Val(D)) end - _una_constraints2 = if _una_constraints1 === nothing - fill(-1, length(unary_operators)) - elseif !is_una_constraints_already_done - [ - haskey(_una_constraints1, op) ? _una_constraints1[op]::Int : -1 for - op in unary_operators - ] - else - _una_constraints1 + return _build_constraints(constraints, operators_by_degree) +end +@unstable function _build_constraints( + constraints, @nospecialize(operators_by_degree::Tuple{Vararg{Any,D}}) +) where {D} + # Expect format ((*)=>(-1, 3)), etc. + + is_constraints_already_done = ntuple(Val(D)) do i + i == 1 && constraints[i] isa Vector{Int} || + i > 1 && constraints[i] isa Vector{NTuple{i,Int}} end - is_bin_constraints_already_done = bin_constraints isa Vector{Tuple{Int,Int}} - _bin_constraints1 = if bin_constraints isa Array && !is_bin_constraints_already_done - Dict(bin_constraints) - else - bin_constraints + _op_constraints = ntuple(Val(D)) do i + if constraints[i] isa Array && !is_constraints_already_done[i] + Dict(constraints[i]) + else + constraints[i] + end end - _bin_constraints2 = if _bin_constraints1 === nothing - fill((-1, -1), length(binary_operators)) - elseif !is_bin_constraints_already_done - [ - if haskey(_bin_constraints1, op) - _bin_constraints1[op]::Tuple{Int,Int} + + return ntuple(Val(D)) do i + let default_value = i == 1 ? 
-1 : ntuple(j -> -1, i) + if isnothing(_op_constraints[i]) + fill(default_value, length(operators_by_degree[i])) + elseif !is_constraints_already_done[i] + typeof(default_value)[ + get(_op_constraints[i], op, default_value) for + op in operators_by_degree[i] + ] else - (-1, -1) - end for op in binary_operators - ] - else - _bin_constraints1 + _op_constraints[i]::Vector{typeof(default_value)} + end + end end - - return _una_constraints2, _bin_constraints2 end @unstable function build_nested_constraints(; - @nospecialize(binary_operators), @nospecialize(unary_operators), nested_constraints + nested_constraints, @nospecialize(operators_by_degree) ) nested_constraints === nothing && return nested_constraints - # Check that intersection of binary operators and unary operators is empty: - for op in binary_operators - if op ∈ unary_operators + + # Check that no operator appears in multiple degrees: + all_operators = Set() + for ops in operators_by_degree, op in ops + if op ∈ all_operators error( - "Operator $(op) is both a binary and unary operator. " * + "Operator $(op) appears in multiple degrees. " * "You can't use nested constraints.", ) end + push!(all_operators, op) end # Convert to dict: @@ -115,31 +124,50 @@ end [cons[1] => Dict(cons[2]...) for cons in nested_constraints]... ) end + for (op, nested_constraint) in _nested_constraints - if !(op ∈ binary_operators || op ∈ unary_operators) + if !(op ∈ all_operators) error("Operator $(op) is not in the operator set.") end for (nested_op, max_nesting) in nested_constraint - if !(nested_op ∈ binary_operators || nested_op ∈ unary_operators) + if !(nested_op ∈ all_operators) error("Operator $(nested_op) is not in the operator set.") end - @assert nested_op ∈ binary_operators || nested_op ∈ unary_operators @assert max_nesting >= -1 && typeof(max_nesting) <: Int end end # Lastly, we clean it up into a dict of (degree,op_idx) => max_nesting. 
return [ - let (degree, idx) = if op ∈ binary_operators - 2, findfirst(isequal(op), binary_operators)::Int - else - 1, findfirst(isequal(op), unary_operators)::Int + let (degree, idx) = begin + found_degree = 0 + found_idx = 0 + for (d, ops) in enumerate(operators_by_degree) + idx_in_degree = findfirst(isequal(op), ops) + if idx_in_degree !== nothing + found_degree = d + found_idx = idx_in_degree + break + end + end + found_degree == 0 && error("Operator $(op) is not in the operator set.") + (found_degree, found_idx) end, new_max_nesting_dict = [ - let (nested_degree, nested_idx) = if nested_op ∈ binary_operators - 2, findfirst(isequal(nested_op), binary_operators)::Int - else - 1, findfirst(isequal(nested_op), unary_operators)::Int + let (nested_degree, nested_idx) = begin + found_degree = 0 + found_idx = 0 + for (d, ops) in enumerate(operators_by_degree) + idx_in_degree = findfirst(isequal(nested_op), ops) + if idx_in_degree !== nothing + found_degree = d + found_idx = idx_in_degree + break + end + end + found_degree == 0 && + error("Operator $(nested_op) is not in the operator set.") + (found_degree, found_idx) end (nested_degree, nested_idx, max_nesting) end for (nested_op, max_nesting) in nested_constraint @@ -150,95 +178,69 @@ end ] end -function binopmap(@nospecialize(op)) - if op == plus - return + - elseif op == mult - return * - elseif op == sub - return - - elseif op == div - return / - elseif op == ^ - return safe_pow - elseif op == pow - return safe_pow - elseif op == Base.:(>) - return greater - elseif op == Base.:(<) - return less - elseif op == Base.:(>=) - return greater_equal - elseif op == Base.:(<=) - return less_equal - end - return op -end -function inverse_binopmap(@nospecialize(op)) - if op == safe_pow - return ^ - elseif op == greater - return Base.:(>) - elseif op == less - return Base.:(<) - elseif op == greater_equal - return Base.:(>=) - elseif op == less_equal - return Base.:(<=) - end - return op -end +const OP_MAP = Dict{Any,Any}( + 
plus => (+), + mult => (*), + sub => (-), + div => (/), + (^) => safe_pow, + pow => safe_pow, + Base.:(>) => greater, + Base.:(<) => less, + Base.:(>=) => greater_equal, + Base.:(<=) => less_equal, + log => safe_log, + log10 => safe_log10, + log2 => safe_log2, + log1p => safe_log1p, + sqrt => safe_sqrt, + asin => safe_asin, + acos => safe_acos, + acosh => safe_acosh, + atanh => safe_atanh, +) +const INVERSE_OP_MAP = Dict{Any,Any}( + safe_pow => (^), + greater => Base.:(>), + less => Base.:(<), + greater_equal => Base.:(>=), + less_equal => Base.:(<=), + safe_log => log, + safe_log10 => log10, + safe_log2 => log2, + safe_log1p => log1p, + safe_sqrt => sqrt, + safe_asin => asin, + safe_acos => acos, + safe_acosh => acosh, + safe_atanh => atanh, +) -function unaopmap(@nospecialize(op)) - if op == log - return safe_log - elseif op == log10 - return safe_log10 - elseif op == log2 - return safe_log2 - elseif op == log1p - return safe_log1p - elseif op == sqrt - return safe_sqrt - elseif op == asin - return safe_asin - elseif op == acos - return safe_acos - elseif op == acosh - return safe_acosh - elseif op == atanh - return safe_atanh - end - return op -end -function inverse_unaopmap(@nospecialize(op)) - if op == safe_log - return log - elseif op == safe_log10 - return log10 - elseif op == safe_log2 - return log2 - elseif op == safe_log1p - return log1p - elseif op == safe_sqrt - return sqrt - elseif op == safe_asin - return asin - elseif op == safe_acos - return acos - elseif op == safe_acosh - return acosh - elseif op == safe_atanh - return atanh - end - return op -end +opmap(@nospecialize(op)) = get(OP_MAP, op, op) +inverse_opmap(@nospecialize(op)) = get(INVERSE_OP_MAP, op, op) recommend_loss_function_expression(expression_type) = false create_mutation_weights(w::AbstractMutationWeights) = w create_mutation_weights(w::NamedTuple) = MutationWeights(; w...) 
+@unstable function with_max_degree_from_context( + node_type, user_provided_operators, operators +) + if has_max_degree(node_type) + # The user passed a node type with an explicit max degree, + # so we don't override it. + node_type + else + if user_provided_operators + # We select a degree so that we fit the number of operators + with_max_degree(node_type, Val(length(operators))) + else + with_max_degree(node_type, Val(2)) + end + end +end + const deprecated_options_mapping = Base.ImmutableDict( :mutationWeights => :mutation_weights, :hofMigration => :hof_migration, @@ -278,6 +280,10 @@ const OPTION_DESCRIPTIONS = """- `defaults`: What set of defaults to use for `Op of the same type as input, and outputs the same type. For the SymbolicUtils simplification backend, you will need to define a generic method of the operator so it takes arbitrary types. +- `operator_enum_constructor`: Constructor function to use for creating the operators enum. + By default, OperatorEnum is used, but you can provide a different constructor like + GenericOperatorEnum. The constructor must accept the keyword arguments 'binary_operators' + and 'unary_operators'. - `unary_operators`: Same, but for unary operators (one input scalar, gives an output scalar). - `constraints`: Array of pairs specifying size constraints @@ -341,6 +347,11 @@ const OPTION_DESCRIPTIONS = """- `defaults`: What set of defaults to use for `Op end - `loss_function_expression`: Similar to `loss_function`, but takes `AbstractExpression` instead of `AbstractExpressionNode` as its first argument. Useful for `TemplateExpressionSpec`. +- `loss_scale`: Determines how loss values are scaled when computing scores. Options are: + - `:log` (default): Uses logarithmic scaling of loss ratios. This mode requires non-negative loss values + and is ideal for traditional loss functions that are always positive. + - `:linear`: Uses direct differences between losses. 
This mode handles any loss values (including negative) + and is useful for custom loss functions, especially those based on likelihoods. - `expression_spec::AbstractExpressionSpec`: A specification of what types of expressions to use in the search. For example, `ExpressionSpec()` (default). You can also see `TemplateExpressionSpec` and `ParametricExpressionSpec` for specialized cases. @@ -399,6 +410,8 @@ const OPTION_DESCRIPTIONS = """- `defaults`: What set of defaults to use for `Op migrated equations at the end of each cycle. - `fraction_replaced_hof`: What fraction to replace with hall of fame equations at the end of each cycle. +- `fraction_replaced_guesses`: What fraction to replace with user-provided + guess expressions at the end of each cycle. - `should_simplify`: Whether to simplify equations. If you pass a custom objective, this will be set to `false`. - `should_optimize_constants`: Whether to use an optimization algorithm @@ -422,9 +435,9 @@ const OPTION_DESCRIPTIONS = """- `defaults`: What set of defaults to use for `Op an instance of `AbstractADType` (see `ADTypes.jl`). Default is `nothing`, which means `Optim.jl` will estimate gradients (likely with finite differences). You can also pass a symbolic version of the backend - type, such as `:Zygote` for Zygote, `:Enzyme`, etc. Most backends will not - work, and many will never work due to incompatibilities, though support for some - is gradually being added. + type, such as `:Zygote` for Zygote.jl or `:Mooncake` for Mooncake.jl. Most backends + will not work, and many will never work due to incompatibilities, though + support for some is gradually being added. - `perturbation_factor`: When mutating a constant, either multiply or divide by (1+perturbation_factor)^(rand()+1). - `probability_negate_constant`: Probability of negating a constant in the equation @@ -491,8 +504,7 @@ $(OPTION_DESCRIPTIONS) @nospecialize(defaults::Union{VersionNumber,Nothing} = nothing), # Search options: ## 1. 
Creating the Search Space: - @nospecialize(binary_operators = nothing), - @nospecialize(unary_operators = nothing), + @nospecialize(operators::Union{Nothing,AbstractOperatorEnum} = nothing), @nospecialize(maxsize::Union{Nothing,Integer} = nothing), @nospecialize(maxdepth::Union{Nothing,Integer} = nothing), @nospecialize(expression_spec::Union{Nothing,AbstractExpressionSpec} = nothing), @@ -521,6 +533,10 @@ $(OPTION_DESCRIPTIONS) @nospecialize(adaptive_parsimony_scaling::Union{Real,Nothing} = nothing), ### should_simplify ## 5. Mutations: + @nospecialize( + operator_enum_constructor::Union{Nothing,Type{<:AbstractOperatorEnum},Function} = + nothing + ), @nospecialize( mutation_weights::Union{AbstractMutationWeights,AbstractVector,NamedTuple,Nothing} = nothing @@ -581,6 +597,7 @@ $(OPTION_DESCRIPTIONS) ## 2. Setting the Search Size: ## 3. The Objective: dimensionless_constants_only::Bool=false, + loss_scale::Symbol=:log, ## 4. Working with Complexities: complexity_mapping::Union{Function,ComplexityMapping,Nothing}=nothing, use_frequency::Bool=true, @@ -606,6 +623,7 @@ $(OPTION_DESCRIPTIONS) hof_migration::Bool=true, fraction_replaced::Union{Real,Nothing}=nothing, fraction_replaced_hof::Union{Real,Nothing}=nothing, + fraction_replaced_guesses::Union{Real,Nothing}=nothing, topn::Union{Nothing,Integer}=nothing, ## 9. Data Preprocessing: ## 10. Stopping Criteria: @@ -645,6 +663,8 @@ $(OPTION_DESCRIPTIONS) npopulations::Union{Nothing,Integer}=nothing, npop::Union{Nothing,Integer}=nothing, deprecated_return_state::Union{Bool,Nothing}=nothing, + unary_operators=nothing, + binary_operators=nothing, kws..., ######################################### ) @@ -728,6 +748,13 @@ $(OPTION_DESCRIPTIONS) if output_file !== nothing error("`output_file` is deprecated. 
Use `output_directory` instead.") end + user_provided_operators = !isnothing(operators) + + if user_provided_operators + @assert binary_operators === nothing + @assert unary_operators === nothing + @assert operator_enum_constructor === nothing + end @assert( count(!isnothing, [elementwise_loss, loss_function, loss_function_expression]) <= 1, @@ -754,8 +781,6 @@ $(OPTION_DESCRIPTIONS) #### Supply defaults ############ #! format: off _default_options = default_options(defaults) - binary_operators = something(binary_operators, _default_options.binary_operators) - unary_operators = something(unary_operators, _default_options.unary_operators) maxsize = something(maxsize, _default_options.maxsize) populations = something(populations, _default_options.populations) population_size = something(population_size, _default_options.population_size) @@ -773,9 +798,14 @@ $(OPTION_DESCRIPTIONS) tournament_selection_p = something(tournament_selection_p, _default_options.tournament_selection_p) fraction_replaced = something(fraction_replaced, _default_options.fraction_replaced) fraction_replaced_hof = something(fraction_replaced_hof, _default_options.fraction_replaced_hof) + fraction_replaced_guesses = something(fraction_replaced_guesses, _default_options.fraction_replaced_guesses) topn = something(topn, _default_options.topn) batching = something(batching, _default_options.batching) batch_size = something(batch_size, _default_options.batch_size) + if !user_provided_operators + binary_operators = something(binary_operators, _default_options.operators.ops[2]) + unary_operators = something(unary_operators, _default_options.operators.ops[1]) + end #! 
format: on ################################# @@ -791,30 +821,58 @@ $(OPTION_DESCRIPTIONS) @assert maxsize > 3 @assert warmup_maxsize_by >= 0.0f0 - @assert length(unary_operators) <= 8192 - @assert length(binary_operators) <= 8192 @assert tournament_selection_n < population_size "`tournament_selection_n` must be less than `population_size`" + @assert loss_scale in (:log, :linear) "`loss_scale` must be either log or linear" # Make sure nested_constraints contains functions within our operator set: - _nested_constraints = build_nested_constraints(; - binary_operators, unary_operators, nested_constraints - ) + _nested_constraints = if user_provided_operators + build_nested_constraints(; nested_constraints, operators_by_degree=operators.ops) + else + # Convert binary/unary to generic format for backwards compatibility + operators_tuple = (unary_operators, binary_operators) + build_nested_constraints(; nested_constraints, operators_by_degree=operators_tuple) + end if typeof(constraints) <: Tuple - constraints = collect(constraints) + constraints = Dict(constraints) + elseif constraints isa AbstractVector + constraints = Dict(constraints) end if constraints !== nothing - @assert bin_constraints === nothing - @assert una_constraints === nothing - # TODO: This is redundant with the checks in equation_search - for op in binary_operators - @assert !(op in unary_operators) + @assert all(isnothing, (bin_constraints, una_constraints)) + if user_provided_operators + # For generic degree interface, constraints should be handled by the generic function + # Don't set bin_constraints/una_constraints as they shouldn't be used + all_operators = Set() + for ops in operators.ops + for op in ops + if op ∈ all_operators + error( + "Operator $(op) appears in multiple degrees. 
" * + "You can't use constraints.", + ) + end + push!(all_operators, op) + end + end + else + for op in binary_operators + @assert !(op in unary_operators) + end + for op in unary_operators + @assert !(op in binary_operators) + end + bin_constraints = constraints + una_constraints = constraints end - for op in unary_operators - @assert !(op in binary_operators) + else + # When constraints is nothing, we might still have individual bin_constraints/una_constraints + if user_provided_operators + @assert( + all(isnothing, (bin_constraints, una_constraints)), + "When using user_provided_operators=true, use the 'constraints' parameter instead of 'bin_constraints' and 'una_constraints'" + ) end - bin_constraints = constraints - una_constraints = constraints end if expression_spec !== nothing @@ -842,9 +900,33 @@ $(OPTION_DESCRIPTIONS) node_type = @something(node_type, default_node_type(expression_type)) end - _una_constraints, _bin_constraints = build_constraints(; - una_constraints, bin_constraints, unary_operators, binary_operators - ) + node_type = with_max_degree_from_context(node_type, user_provided_operators, operators) + + operators = if user_provided_operators && operators isa OperatorEnum + # Apply opmap to user-provided operators (e.g., log -> safe_log) + mapped_operators_by_degree = ntuple(length(operators.ops)) do i + map(opmap, operators.ops[i]) + end + OperatorEnum(mapped_operators_by_degree) + else + operators + end + + op_constraints = if user_provided_operators + @assert( + all(isnothing, (una_constraints, bin_constraints)), + "When using user_provided_operators=true, use the 'constraints' parameter instead of 'una_constraints' and 'bin_constraints'" + ) + + build_constraints(; constraints, operators_by_degree=operators.ops) + else + # Convert binary/unary to generic format for backwards compatibility + build_constraints(; + una_constraints, + bin_constraints, + operators_by_degree=(unary_operators, binary_operators), + ) + end complexity_mapping = 
@something( complexity_mapping, @@ -852,17 +934,18 @@ $(OPTION_DESCRIPTIONS) complexity_of_operators, complexity_of_variables, complexity_of_constants, - binary_operators, - unary_operators, + if user_provided_operators + operators.ops + else + (unary_operators, binary_operators) + end, ) ) - if maxdepth === nothing - maxdepth = maxsize - end + maxdepth = something(maxdepth, maxsize) - if define_helper_functions - # We call here so that mapped operators, like ^ + if define_helper_functions && !user_provided_operators + # We call here so that mapped operators, like `^` # are correctly overloaded, rather than overloading # operators like "safe_pow", etc. OperatorEnum(; @@ -873,15 +956,24 @@ $(OPTION_DESCRIPTIONS) ) end - binary_operators = map(binopmap, binary_operators) - unary_operators = map(unaopmap, unary_operators) - - operators = OperatorEnum(; - binary_operators=binary_operators, - unary_operators=unary_operators, - define_helper_functions=define_helper_functions, - empty_old_operators=false, - ) + operators = if user_provided_operators + operators + else + binary_operators = map(opmap, binary_operators) + unary_operators = map(opmap, unary_operators) + if operator_enum_constructor !== nothing + operator_enum_constructor(; + binary_operators=binary_operators, unary_operators=unary_operators + ) + else + OperatorEnum(; + binary_operators=binary_operators, + unary_operators=unary_operators, + define_helper_functions=define_helper_functions, + empty_old_operators=false, + ) + end + end early_stop_condition = if typeof(early_stop_condition) <: Real # Need to make explicit copy here for this to work: @@ -927,9 +1019,13 @@ $(OPTION_DESCRIPTIONS) output_directory end + nops = map(length, operators.ops) + options = Options{ typeof(complexity_mapping), operator_specialization(typeof(operators), expression_type), + typeof(nops), + typeof(op_constraints), node_type, expression_type, typeof(expression_options), @@ -941,8 +1037,8 @@ $(OPTION_DESCRIPTIONS) print_precision, 
}( operators, - _bin_constraints, - _una_constraints, + op_constraints, + _nested_constraints, complexity_mapping, tournament_selection_n, tournament_selection_p, @@ -974,17 +1070,18 @@ $(OPTION_DESCRIPTIONS) ncycles_per_iteration, fraction_replaced, fraction_replaced_hof, + fraction_replaced_guesses, topn, verbosity, Val(print_precision), save_to_file, probability_negate_constant, - length(unary_operators), - length(binary_operators), + nops, seed, elementwise_loss, loss_function, loss_function_expression, + loss_scale, node_type, expression_type, expression_options, @@ -1003,7 +1100,6 @@ $(OPTION_DESCRIPTIONS) max_evals, input_stream, skip_mutation_failures, - _nested_constraints, deterministic, define_helper_functions, use_recorder, @@ -1013,11 +1109,11 @@ $(OPTION_DESCRIPTIONS) end function default_options(@nospecialize(version::Union{VersionNumber,Nothing} = nothing)) - if version isa VersionNumber && version < v"1.0.0" + version isa VersionNumber && + version < v"1.0.0" && return (; # Creating the Search Space - binary_operators=[+, -, /, *], - unary_operators=Function[], + operators=OperatorEnum(((), (+, -, /, *))), maxsize=20, # Setting the Search Size populations=15, @@ -1054,61 +1150,67 @@ function default_options(@nospecialize(version::Union{VersionNumber,Nothing} = n # Migration between Populations fraction_replaced=0.00036, fraction_replaced_hof=0.035, + fraction_replaced_guesses=0.001, topn=12, # Performance and Parallelization batching=false, batch_size=50, ) - else - return (; - # Creating the Search Space - binary_operators=Function[+, -, /, *], - unary_operators=Function[], - maxsize=30, - # Setting the Search Size - populations=31, - population_size=27, - ncycles_per_iteration=380, - # Working with Complexities - parsimony=0.0, - warmup_maxsize_by=0.0, - adaptive_parsimony_scaling=1040, - # Mutations - mutation_weights=MutationWeights(; - mutate_constant=0.0346, - mutate_operator=0.293, - swap_operands=0.198, - rotate_tree=4.26, - add_node=2.47, 
- insert_node=0.0112, - delete_node=0.870, - simplify=0.00209, - randomize=0.000502, - do_nothing=0.273, - optimize=0.0, - form_connection=0.5, - break_connection=0.1, - ), - crossover_probability=0.0259, - annealing=true, - alpha=3.17, - perturbation_factor=0.129, - probability_negate_constant=0.00743, - # Tournament Selection - tournament_selection_n=15, - tournament_selection_p=0.982, - # Migration between Populations - fraction_replaced=0.00036, - ## ^Note: the optimal value found was 0.00000425, - ## but I thought this was a symptom of doing the sweep on such - ## a small problem, so I increased it to the older value of 0.00036 - fraction_replaced_hof=0.0614, - topn=12, - # Performance and Parallelization - batching=false, - batch_size=50, - ) + + defaults = (; + # Creating the Search Space + operators=OperatorEnum(((), (+, -, /, *))), + maxsize=30, + # Setting the Search Size + populations=31, + population_size=27, + ncycles_per_iteration=380, + # Working with Complexities + parsimony=0.0, + warmup_maxsize_by=0.0, + adaptive_parsimony_scaling=1040.0, + # Mutations + mutation_weights=MutationWeights(; + mutate_constant=0.0346, + mutate_operator=0.293, + swap_operands=0.198, + rotate_tree=4.26, + add_node=2.47, + insert_node=0.0112, + delete_node=0.870, + simplify=0.00209, + randomize=0.000502, + do_nothing=0.273, + optimize=0.0, + form_connection=0.5, + break_connection=0.1, + ), + crossover_probability=0.0259, + annealing=true, + alpha=3.17, + perturbation_factor=0.129, + probability_negate_constant=0.00743, + # Tournament Selection + tournament_selection_n=15, + tournament_selection_p=0.982, + # Migration between Populations + fraction_replaced=0.00036, + ## ^Note: the optimal value found was 0.00000425, + ## but I thought this was a symptom of doing the sweep on such + ## a small problem, so I increased it to the older value of 0.00036 + fraction_replaced_hof=0.0614, + fraction_replaced_guesses=0.001, + topn=12, + # Performance and Parallelization + 
batching=false, + batch_size=50, + ) + + if version isa VersionNumber && version >= v"2.0.0-" + defaults = (; defaults..., adaptive_parsimony_scaling=20.0) end + + return defaults end end diff --git a/src/OptionsStruct.jl b/src/OptionsStruct.jl index 2d77970d5..2c3046204 100644 --- a/src/OptionsStruct.jl +++ b/src/OptionsStruct.jl @@ -14,56 +14,57 @@ This struct defines how complexity is calculated. # Fields - `use`: Shortcut indicating whether we use custom complexities, or just use 1 for everything. -- `binop_complexities`: Complexity of each binary operator. -- `unaop_complexities`: Complexity of each unary operator. +- `op_complexities`: Tuple of vectors, where each vector contains + the complexities for operators of that degree. - `variable_complexity`: Complexity of using a variable. - `constant_complexity`: Complexity of using a constant. """ -struct ComplexityMapping{T<:Real,VC<:Union{T,AbstractVector{T}}} +struct ComplexityMapping{T<:Real,VC<:Union{T,AbstractVector{T}},D} use::Bool - binop_complexities::Vector{T} - unaop_complexities::Vector{T} + op_complexities::NTuple{D,Vector{T}} variable_complexity::VC constant_complexity::T end Base.eltype(::ComplexityMapping{T}) where {T} = T +function Base.:(==)(a::ComplexityMapping, b::ComplexityMapping) + return a.use == b.use && + a.op_complexities == b.op_complexities && + a.variable_complexity == b.variable_complexity && + a.constant_complexity == b.constant_complexity +end + """Promote type when defining complexity mapping.""" function ComplexityMapping(; - binop_complexities::Vector{T1}, - unaop_complexities::Vector{T2}, - variable_complexity::Union{T3,AbstractVector{T3}}, - constant_complexity::T4, -) where {T1<:Real,T2<:Real,T3<:Real,T4<:Real} - T = promote_type(T1, T2, T3, T4) + op_complexities::Tuple{Vararg{Vector,D}}, + variable_complexity::Union{T2,AbstractVector{T2}}, + constant_complexity::T3, +) where {T2<:Real,T3<:Real,D} + T = promote_type(map(eltype, op_complexities)..., T2, T3) vc = map(T, 
variable_complexity) - return ComplexityMapping{T,typeof(vc)}( - true, - map(T, binop_complexities), - map(T, unaop_complexities), - vc, - T(constant_complexity), + return ComplexityMapping{T,typeof(vc),D}( + true, map(Base.Fix1(map, T), op_complexities), vc, T(constant_complexity) ) end -function ComplexityMapping( - ::Nothing, ::Nothing, ::Nothing, binary_operators, unary_operators -) +function ComplexityMapping(::Nothing, ::Nothing, ::Nothing, operators::Tuple) # If no customization provided, then we simply # turn off the complexity mapping use = false - return ComplexityMapping{Int,Int}(use, zeros(Int, 0), zeros(Int, 0), 0, 0) + return ComplexityMapping{Int,Int,length(operators)}( + use, ntuple(i -> Int[], Val(length(operators))), 0, 0 + ) end + function ComplexityMapping( complexity_of_operators, complexity_of_variables, complexity_of_constants, - binary_operators, - unary_operators, + operators::Tuple, ) _complexity_of_operators = if complexity_of_operators === nothing - Dict{Function,Int64}() + Dict{Any,Int64}() else # Convert to dict: Dict(complexity_of_operators) @@ -87,15 +88,11 @@ function ComplexityMapping( T = promote_type(VAR_T, CONST_T, OP_T) - # If not in dict, then just set it to 1. - binop_complexities = T[ - (haskey(_complexity_of_operators, op) ? _complexity_of_operators[op] : one(T)) # - for op in binary_operators - ] - unaop_complexities = T[ - (haskey(_complexity_of_operators, op) ? 
_complexity_of_operators[op] : one(T)) # - for op in unary_operators - ] + # Build operator complexities for each degree as vectors + op_complexities = ntuple( + i -> T[get(_complexity_of_operators, op, one(T)) for op in operators[i]], + Val(length(operators)), + ) variable_complexity = if complexity_of_variables !== nothing map(T, complexity_of_variables) @@ -108,9 +105,7 @@ function ComplexityMapping( one(T) end - return ComplexityMapping(; - binop_complexities, unaop_complexities, variable_complexity, constant_complexity - ) + return ComplexityMapping(; op_complexities, variable_complexity, constant_complexity) end """ @@ -118,9 +113,11 @@ Controls level of specialization we compile into `Options`. Overload if needed for custom expression types. """ -operator_specialization( +function operator_specialization( ::Type{O}, ::Type{<:AbstractExpression} -) where {O<:AbstractOperatorEnum} = O +) where {O<:AbstractOperatorEnum} + return O +end @unstable operator_specialization(::Type{<:OperatorEnum}, ::Type{<:AbstractExpression}) = OperatorEnum @@ -180,6 +177,8 @@ abstract type AbstractOptions end struct Options{ CM<:Union{ComplexityMapping,Function}, OP<:AbstractOperatorEnum, + NOPS<:Tuple, + OP_CONSTRAINTS<:Tuple{Vararg{Vector{<:Union{Int,Tuple{Vararg{Int}}}}}}, N<:AbstractExpressionNode, E<:AbstractExpression, EO<:NamedTuple, @@ -191,8 +190,8 @@ struct Options{ print_precision, } <: AbstractOptions operators::OP - bin_constraints::Vector{Tuple{Int,Int}} - una_constraints::Vector{Int} + op_constraints::OP_CONSTRAINTS + nested_constraints::Union{Vector{Tuple{Int,Int,Vector{Tuple{Int,Int,Int}}}},Nothing} complexity_mapping::CM tournament_selection_n::Int tournament_selection_p::Float32 @@ -224,17 +223,18 @@ struct Options{ ncycles_per_iteration::Int fraction_replaced::Float32 fraction_replaced_hof::Float32 + fraction_replaced_guesses::Float32 topn::Int verbosity::Union{Int,Nothing} v_print_precision::Val{print_precision} save_to_file::Bool 
probability_negate_constant::Float32 - nuna::Int - nbin::Int + nops::NOPS seed::Union{Int,Nothing} elementwise_loss::Union{SupervisedLoss,Function} loss_function::Union{Nothing,Function} loss_function_expression::Union{Nothing,Function} + loss_scale::Symbol node_type::Type{N} expression_type::Type{E} expression_options::EO @@ -253,7 +253,6 @@ struct Options{ max_evals::Union{Int,Nothing} input_stream::IO skip_mutation_failures::Bool - nested_constraints::Union{Vector{Tuple{Int,Int,Vector{Tuple{Int,Int,Int}}}},Nothing} deterministic::Bool define_helper_functions::Bool use_recorder::Bool @@ -263,13 +262,13 @@ function Base.print(io::IO, @nospecialize(options::Options)) return print( io, "Options(" * - "binops=$(options.operators.binops), " * - "unaops=$(options.operators.unaops), " + "operators=$(options.operators), " # Fill in remaining fields automatically: * join( [ - if fieldname in (:optimizer_options, :mutation_weights) + if fieldname in + (:optimizer_algorithm, :optimizer_options, :mutation_weights) "$(fieldname)=..." else "$(fieldname)=$(getfield(options, fieldname))" @@ -300,4 +299,40 @@ end end end +struct WarmStartIncompatibleError <: Exception + fields::Vector{Symbol} +end + +function Base.showerror(io::IO, e::WarmStartIncompatibleError) + print(io, "Warm start incompatible due to changed field(s): ") + join(io, e.fields, ", ") + return print(io, ". 
Use `fit!(mach, force=true)` to restart training.") +end + +check_warm_start_compatibility(::AbstractOptions, ::AbstractOptions) = nothing # LCOV_EXCL_LINE + +function check_warm_start_compatibility(old_options::Options, new_options::Options) + incompatible_fields = ( + :operators, + :op_constraints, + :nested_constraints, + :complexity_mapping, + :dimensionless_constants_only, + :maxsize, + :maxdepth, + :populations, + :population_size, + :node_type, + :expression_type, + :expression_options, + ) + + changed = [ + f for f in incompatible_fields if + getproperty(old_options, f) != getproperty(new_options, f) + ] + isempty(changed) || throw(WarmStartIncompatibleError(changed)) + return nothing +end + end diff --git a/src/ParametricExpression.jl b/src/ParametricExpression.jl index 6b5f4e61e..185753b8a 100644 --- a/src/ParametricExpression.jl +++ b/src/ParametricExpression.jl @@ -196,10 +196,37 @@ IDE.handles_class_column(::Type{<:ParametricExpression}) = true """ ParametricExpressionSpec <: AbstractExpressionSpec +!!! warning + `ParametricExpressionSpec` is no longer recommended. Please use `@template_spec` (creating a `TemplateExpressionSpec`) instead. + (Experimental) Specification for parametric expressions with configurable maximum parameters. """ -Base.@kwdef struct ParametricExpressionSpec <: AbstractExpressionSpec +struct ParametricExpressionSpec <: AbstractExpressionSpec max_parameters::Int + + function ParametricExpressionSpec(; max_parameters::Int, warn::Bool=true) + # Build a generic deprecation message + msg = """ + ParametricExpressionSpec is no longer recommended – it is both faster, safer, and more explicit to + use TemplateExpressionSpec with the `@template_spec` macro instead. 
+ + Example with @template_spec macro: + + n_categories = length(unique(X.class)) + expression_spec = @template_spec( + expressions=(f,), + parameters=($(join(["p$i=n_categories" for i in 1:max_parameters], ", "))), + ) do x, #= other variable names..., =# category #= additional category feature =# + f(x1, #= other variable names..., =# $(join(["p$i[category]" for i in 1:max_parameters], ", "))) + end + + Then, when passing your dataset, include another feature with the category column. + """ + + warn && @warn msg maxlog = 1 + + return new(max_parameters) + end end # COV_EXCL_START diff --git a/src/ProgramConstants.jl b/src/ProgramConstants.jl index 7ae2ccd7b..ae09a5177 100644 --- a/src/ProgramConstants.jl +++ b/src/ProgramConstants.jl @@ -2,7 +2,7 @@ module ProgramConstantsModule const RecordType = Dict{String,Any} -const DATA_TYPE = Number +const DATA_TYPE = Any const LOSS_TYPE = Real end diff --git a/src/SearchUtils.jl b/src/SearchUtils.jl index 696e596b3..e568bfd7c 100644 --- a/src/SearchUtils.jl +++ b/src/SearchUtils.jl @@ -11,18 +11,47 @@ using StyledStrings: @styled_str using DispatchDoctor: @unstable using Logging: AbstractLogger -using DynamicExpressions: AbstractExpression, string_tree +using DynamicExpressions: + AbstractExpression, string_tree, parse_expression, EvalOptions, with_type_parameters using ..UtilsModule: subscriptify using ..CoreModule: Dataset, AbstractOptions, Options, RecordType, max_features using ..ComplexityModule: compute_complexity using ..PopulationModule: Population using ..PopMemberModule: AbstractPopMember, PopMember using ..HallOfFameModule: HallOfFame, string_dominating_pareto_curve +using ..ConstantOptimizationModule: optimize_constants using ..ProgressBarsModule: WrappedProgressBar, manually_iterate!, barlen using ..AdaptiveParsimonyModule: RunningSearchStatistics +using ..ExpressionBuilderModule: strip_metadata +using ..InterfaceDynamicExpressionsModule: takes_eval_options +using ..CheckConstraintsModule: check_constraints 
function logging_callback! end +""" + @filtered_async expr + +Like `@async` but with error monitoring that ignores `Distributed.ProcessExitedException` +to avoid spam when worker processes exit normally. +""" +macro filtered_async(expr) + return esc( + quote + $(Base).errormonitor( + @async begin + try + $expr + catch ex + if !(ex isa $(Distributed).ProcessExitedException) + rethrow(ex) + end + end + end + ) + end, + ) +end + """ AbstractRuntimeOptions @@ -54,6 +83,7 @@ struct RuntimeOptions{PARALLELISM,DIM_OUT,RETURN_STATE,LOGGER} <: AbstractRuntim numprocs::Int64 init_procs::Union{Vector{Int},Nothing} addprocs_function::Function + worker_timeout::Float64 exeflags::Cmd worker_imports::Union{Vector{Symbol},Nothing} runtests::Bool @@ -90,6 +120,7 @@ end procs::Union{Vector{Int},Nothing}=nothing, addprocs_function::Union{Function,Nothing}=nothing, heap_size_hint_in_bytes::Union{Integer,Nothing}=nothing, + worker_timeout::Union{Real,Nothing}=nothing, worker_imports::Union{Vector{Symbol},Nothing}=nothing, runtests::Bool=true, return_state::VRS=nothing, @@ -117,39 +148,39 @@ end :serial end if concurrency in (:multithreading, :serial) - numprocs !== nothing && error( + !isnothing(numprocs) && error( "`numprocs` should not be set when using `parallelism=$(parallelism)`. Please use `:multiprocessing`.", ) - procs !== nothing && error( + !isnothing(procs) && error( "`procs` should not be set when using `parallelism=$(parallelism)`. 
Please use `:multiprocessing`.", ) end - verbosity !== nothing && - options_verbosity !== nothing && + !isnothing(verbosity) && + !isnothing(options_verbosity) && error( "You cannot set `verbosity` in both the search parameters " * "`AbstractOptions` and the call to `equation_search`.", ) - progress !== nothing && - options_progress !== nothing && + !isnothing(progress) && + !isnothing(options_progress) && error( "You cannot set `progress` in both the search parameters " * "`AbstractOptions` and the call to `equation_search`.", ) - ORS !== nothing && - return_state !== nothing && + !isnothing(ORS) && + !isnothing(return_state) && error( "You cannot set `return_state` in both the `AbstractOptions` and in the passed arguments.", ) - _numprocs::Int = if numprocs === nothing - if procs === nothing + _numprocs::Int = if isnothing(numprocs) + if isnothing(procs) 4 else length(procs) end else - if procs === nothing + if isnothing(procs) numprocs else @assert length(procs) == numprocs @@ -162,15 +193,22 @@ end _verbosity = something(verbosity, options_verbosity, 1) _progress = something(progress, options_progress, (_verbosity > 0) && nout == 1) _addprocs_function = something(addprocs_function, addprocs) + _worker_timeout = Float64( + something( + worker_timeout, + tryparse(Float64, get(ENV, "JULIA_WORKER_TIMEOUT", "")), + max(60, _numprocs^2), + ), + ) _run_id = @something(run_id, generate_run_id()) - exeflags = if concurrency == :multiprocessing + exeflags = if concurrency == :multiprocessing && isnothing(procs) heap_size_hint_in_megabytes = floor( Int, (@something(heap_size_hint_in_bytes, (Sys.free_memory() / _numprocs))) / 1024^2, ) _verbosity > 0 && - heap_size_hint_in_bytes === nothing && + isnothing(heap_size_hint_in_bytes) && @info "Automatically setting `--heap-size-hint=$(heap_size_hint_in_megabytes)M` on each Julia process. You can configure this with the `heap_size_hint_in_bytes` parameter." 
`--heap-size=$(heap_size_hint_in_megabytes)M` @@ -183,6 +221,7 @@ end _numprocs, procs, _addprocs_function, + _worker_timeout, exeflags, worker_imports, runtests, @@ -261,9 +300,9 @@ macro sr_spawner(expr, kws...) if $(parallelism) == :serial $(expr) elseif $(parallelism) == :multiprocessing - @spawnat($(worker_idx), $(expr)) + $(Distributed).@spawnat($(worker_idx), $(expr)) elseif $(parallelism) == :multithreading - Threads.@spawn($(expr)) + $(Threads).@spawn($(expr)) else error("Invalid parallel type ", string($(parallelism)), ".") end @@ -562,6 +601,7 @@ Base.@kwdef struct SearchState{T,L,N<:AbstractExpression{T},WorkerOutputType,Cha cur_maxsizes::Vector{Int} stdin_reader::StdinReader record::Base.RefValue{RecordType} + seed_members::Vector{Vector{PopMember{T,L,N}}} end function save_to_file( @@ -685,6 +725,9 @@ function update_hall_of_fame!( if !valid_size continue end + if !check_constraints(member.tree, options, options.maxsize, size) + continue + end not_filled = !hall_of_fame.exists[size] better_than_current = member.cost < hall_of_fame.members[size].cost if not_filled || better_than_current @@ -694,4 +737,103 @@ function update_hall_of_fame!( end end +function _parse_guess_expression( + ::Type{T}, g::AbstractExpression, ::Dataset, ::AbstractOptions +) where {T} + return copy(g) +end + +@unstable function _parse_guess_expression( + ::Type{T}, g::NamedTuple, dataset::Dataset, options::AbstractOptions +) where {T} + # Check if any expression in the NamedTuple uses actual variable names instead of placeholder syntax + for expr_str in values(g), var_name in dataset.variable_names + if occursin(Regex("\\b\\Q$(var_name)\\E\\b"), expr_str) + throw( + ArgumentError( + "Found variable name '$(var_name)' in TemplateExpression guess. " * + "Use placeholder syntax '#1', '#2', etc., (for argument 1, 2, etc.) 
instead of actual variable names.", + ), + ) + end + end + + eval_options_kws = if takes_eval_options(options.operators) + (; eval_options=EvalOptions(; options.turbo, options.bumper)) + else + NamedTuple() + end + return parse_expression( + g; + expression_type=options.expression_type, + operators=options.operators, + variable_names=nothing, # Don't pass dataset variable names - let custom parse_expression handle #N placeholders + node_type=with_type_parameters(options.node_type, T), + expression_options=options.expression_options, + eval_options_kws..., + ) +end + +@unstable function _parse_guess_expression( + ::Type{T}, g, dataset::Dataset, options::AbstractOptions +) where {T} + return parse_expression( + g; + operators=options.operators, + variable_names=dataset.variable_names, + node_type=with_type_parameters(options.node_type, T), + expression_type=options.expression_type, + ) +end + +"""Parse user-provided guess expressions and convert them into optimized +`PopMember` objects for each output dataset.""" +function parse_guesses( + ::Type{P}, + guesses::Union{AbstractVector,AbstractVector{<:AbstractVector}}, + datasets::Vector{D}, + options::AbstractOptions, +) where {T,L,P<:AbstractPopMember{T,L},D<:Dataset{T,L}} + nout = length(datasets) + out = [P[] for _ in 1:nout] + guess_lists = _make_vector_vector(guesses, nout) + for j in 1:nout + dataset = datasets[j] + for g in guess_lists[j] + ex = _parse_guess_expression(T, g, dataset, options) + member = PopMember(dataset, ex, options; deterministic=options.deterministic) + if options.should_optimize_constants + member, _ = optimize_constants(dataset, member, options) + end + member = strip_metadata(member, options, dataset) + + # Check if guess expression exceeds maxsize and warn + complexity = compute_complexity(member.tree, options) + if complexity > options.maxsize + expr_str = string_tree(member.tree, options) + @warn "Guess expression '$expr_str' has complexity $complexity > maxsize ($(options.maxsize))." 
+ end + + push!(out[j], member) + end + end + return out +end +function _make_vector_vector(guesses, nout) + if nout == 1 + if guesses isa AbstractVector{<:AbstractVector} + @assert length(guesses) == nout + return guesses + else + return [guesses] + end + else # nout > 1 + if !(guesses isa AbstractVector{<:AbstractVector}) + throw(ArgumentError("`guesses` must be a vector of vectors when `nout > 1`")) + end + @assert length(guesses) == nout + return guesses + end +end + end diff --git a/src/SymbolicRegression.jl b/src/SymbolicRegression.jl index ec9a31742..6871fa955 100644 --- a/src/SymbolicRegression.jl +++ b/src/SymbolicRegression.jl @@ -110,6 +110,7 @@ using DynamicExpressions: AbstractExpressionNode, ExpressionInterface, OperatorEnum, + GenericOperatorEnum, @parse_expression, parse_expression, @declare_expression_operator, @@ -219,6 +220,7 @@ using DispatchDoctor: @stable include("ConstantOptimization.jl") include("Population.jl") include("HallOfFame.jl") + include("ExpressionBuilder.jl") include("Mutate.jl") include("RegularizedEvolution.jl") include("SingleIteration.jl") @@ -226,7 +228,6 @@ using DispatchDoctor: @stable include("Migration.jl") include("SearchUtils.jl") include("Logging.jl") - include("ExpressionBuilder.jl") include("ComposableExpression.jl") include("TemplateExpression.jl") include("TemplateExpressionMacro.jl") @@ -243,10 +244,14 @@ using .CoreModule: AbstractOptions, Options, ComplexityMapping, + WarmStartIncompatibleError, AbstractMutationWeights, MutationWeights, AbstractExpressionSpec, ExpressionSpec, + init_value, + sample_value, + mutate_value, get_safe_op, max_features, is_weighted, @@ -289,12 +294,9 @@ using .CheckConstraintsModule: check_constraints using .AdaptiveParsimonyModule: RunningSearchStatistics, update_frequencies!, move_window!, normalize_frequencies! 
using .MutationFunctionsModule: - gen_random_tree, - gen_random_tree_fixed_size, - random_node, - random_node_and_parent, - crossover_trees -using .InterfaceDynamicExpressionsModule: @extend_operators + gen_random_tree, gen_random_tree_fixed_size, random_node, crossover_trees +using .InterfaceDynamicExpressionsModule: + @extend_operators, require_copy_to_workers, make_example_inputs using .LossFunctionsModule: eval_loss, eval_cost, update_baseline_loss!, score_func using .PopMemberModule: AbstractPopMember, PopMember, reset_birth! using .PopulationModule: Population, best_sub_pop, record_population, best_of_sample @@ -316,6 +318,7 @@ using .SearchUtilsModule: get_worker_output_type, extract_from_worker, @sr_spawner, + @filtered_async, StdinReader, watch_stream, close_reader!, @@ -335,12 +338,14 @@ using .SearchUtilsModule: save_to_file, get_cur_maxsize, update_hall_of_fame!, + parse_guesses, logging_callback! using .LoggingModule: AbstractSRLogger, SRLogger, get_logger using .TemplateExpressionModule: - TemplateExpression, TemplateStructure, TemplateExpressionSpec, ParamVector -using .TemplateExpressionModule: ValidVector -using .ComposableExpressionModule: ComposableExpression + TemplateExpression, TemplateStructure, TemplateExpressionSpec, ParamVector, has_params +using .TemplateExpressionModule: ValidVector, TemplateReturnError +using .ComposableExpressionModule: + ComposableExpression, ValidVectorMixError, ValidVectorAccessError using .ExpressionBuilderModule: embed_metadata, strip_metadata using .ParametricExpressionModule: ParametricExpressionSpec using .TemplateExpressionMacroModule: @template_spec @@ -409,6 +414,9 @@ which is useful for debugging and profiling. is close to the recommended size. This is important for long-running distributed jobs where each process has an independent memory, and can help avoid out-of-memory errors. By default, this is set to `Sys.free_memory() / numprocs`. 
+- `worker_timeout::Union{Real,Nothing}=nothing`: Timeout in seconds for worker processes + to establish connection with the master process. If `JULIA_WORKER_TIMEOUT` is already set, + that value is used. Otherwise defaults to `max(60, numprocs^2)`. - `worker_imports::Union{Vector{Symbol},Nothing}=nothing`: If you want to import additional modules on each worker, pass them here as a vector of symbols. By default some of the extensions will automatically be loaded when needed. @@ -441,6 +449,11 @@ which is useful for debugging and profiling. - `y_units=nothing`: The units of the output, to be used for dimensional constraints. If `y` is a matrix, then this can be a vector of units, in which case each element corresponds to each output feature. +- `guesses::Union{AbstractVector,AbstractVector{<:AbstractVector},Nothing}=nothing`: Initial + guess equations to seed the search. Examples: + - Single output: `["x1^2 + x2", "sin(x1) * x2"]` + - Multi-output: `[["x1 + x2"], ["x1 * x2", "x1 - x2"]]` + Constants will be automatically optimized. # Returns - `hallOfFame::HallOfFame`: The best equations seen during the search. 
@@ -463,6 +476,7 @@ function equation_search( procs::Union{Vector{Int},Nothing}=nothing, addprocs_function::Union{Function,Nothing}=nothing, heap_size_hint_in_bytes::Union{Integer,Nothing}=nothing, + worker_timeout::Union{Real,Nothing}=nothing, worker_imports::Union{Vector{Symbol},Nothing}=nothing, runtests::Bool=true, saved_state=nothing, @@ -475,6 +489,7 @@ function equation_search( X_units::Union{AbstractVector,Nothing}=nothing, y_units=nothing, extra::NamedTuple=NamedTuple(), + guesses::Union{AbstractVector,AbstractVector{<:AbstractVector},Nothing}=nothing, v_dim_out::Val{DIM_OUT}=Val(nothing), # Deprecated: multithreaded=nothing, @@ -513,6 +528,7 @@ function equation_search( procs=procs, addprocs_function=addprocs_function, heap_size_hint_in_bytes=heap_size_hint_in_bytes, + worker_timeout=worker_timeout, worker_imports=worker_imports, runtests=runtests, saved_state=saved_state, @@ -521,6 +537,7 @@ function equation_search( verbosity=verbosity, logger=logger, progress=progress, + guesses=guesses, v_dim_out=Val(DIM_OUT), ) end @@ -539,6 +556,7 @@ function equation_search( datasets::Vector{D}; options::AbstractOptions=Options(), saved_state=nothing, + guesses::Union{AbstractVector,AbstractVector{<:AbstractVector},Nothing}=nothing, runtime_options::Union{AbstractRuntimeOptions,Nothing}=nothing, runtime_options_kws..., ) where {T<:DATA_TYPE,L<:LOSS_TYPE,D<:Dataset{T,L}} @@ -554,15 +572,19 @@ function equation_search( ) # Underscores here mean that we have mutated the variable - return _equation_search(datasets, _runtime_options, options, saved_state) + return _equation_search(datasets, _runtime_options, options, saved_state, guesses) end @noinline function _equation_search( - datasets::Vector{D}, ropt::AbstractRuntimeOptions, options::AbstractOptions, saved_state + datasets::Vector{D}, + ropt::AbstractRuntimeOptions, + options::AbstractOptions, + saved_state, + guesses, ) where {D<:Dataset} _validate_options(datasets, ropt, options) state = 
_create_workers(PopMember, datasets, ropt, options) - _initialize_search!(state, datasets, ropt, options, saved_state) + _initialize_search!(state, datasets, ropt, options, saved_state, guesses) _warmup_search!(PopMember, state, datasets, ropt, options) _main_search_loop!(PopMember, state, datasets, ropt, options) _tear_down!(state, ropt, options) @@ -610,7 +632,7 @@ end nout = length(datasets) example_dataset = first(datasets) - example_ex = create_expression(zero(T), options, example_dataset) + example_ex = create_expression(init_value(T), options, example_dataset) NT = typeof(example_ex) PopType = Population{T,L,NT} HallOfFameType = HallOfFame{T,L,NT,PM{T,L,NT}} @@ -630,6 +652,7 @@ end procs=ropt.init_procs, ropt.numprocs, ropt.addprocs_function, + ropt.worker_timeout, options, worker_imports=ropt.worker_imports, project_path=splitdir(Pkg.project().path)[1], @@ -670,6 +693,8 @@ end j in 1:nout ] + seed_members = [PopMember{T,L,NT}[] for j in 1:nout] + return SearchState{T,L,typeof(example_ex),WorkerOutputType,ChannelType}(; procs=procs, we_created_procs=we_created_procs, @@ -687,6 +712,7 @@ end cur_maxsizes=cur_maxsizes, stdin_reader=stdin_reader, record=Ref(record), + seed_members=seed_members, ) end function _initialize_search!( @@ -695,6 +721,7 @@ function _initialize_search!( ropt::AbstractRuntimeOptions, options::AbstractOptions, saved_state, + guesses::Union{AbstractVector,AbstractVector{<:AbstractVector},Nothing}, ) where {T,L,N} nout = length(datasets) @@ -717,6 +744,16 @@ function _initialize_search!( end end + if !isnothing(guesses) + parsed_seed_members = parse_guesses( + eltype(state.halls_of_fame[1]), guesses, datasets, options + ) + for j in 1:nout + state.seed_members[j] = copy(parsed_seed_members[j]) + update_hall_of_fame!(state.halls_of_fame[j], parsed_seed_members[j], options) + end + end + for j in 1:nout, i in 1:(options.populations) worker_idx = assign_next_worker!( state.worker_assignment; out=j, pop=i, parallelism=ropt.parallelism, 
state.procs @@ -767,6 +804,22 @@ function _initialize_search!( end return nothing end + +function _preserve_loaded_state!( + state::AbstractSearchState{T,L,N}, + ropt::AbstractRuntimeOptions, + options::AbstractOptions, +) where {T,L,N} + nout = length(state.worker_output) + for j in 1:nout, i in 1:(options.populations) + (pop, _, _, _) = extract_from_worker( + state.worker_output[j][i], Population{T,L,N}, HallOfFame{T,L,N} + ) + state.last_pops[j][i] = copy(pop) + end + return nothing +end + function _warmup_search!( ::Type{PM}, state::AbstractSearchState{T,L,N}, @@ -774,6 +827,10 @@ function _warmup_search!( ropt::AbstractRuntimeOptions, options::AbstractOptions, ) where {T,L,N,PM<:AbstractPopMember} + if ropt.niterations == 0 + return _preserve_loaded_state!(state, ropt, options) + end + nout = length(datasets) for j in 1:nout, i in 1:(options.populations) dataset = datasets[j] @@ -844,9 +901,7 @@ function _main_search_loop!( if ropt.parallelism in (:multiprocessing, :multithreading) for j in 1:nout, i in 1:(options.populations) # Start listening for each population to finish: - t = Base.errormonitor( - @async put!(state.channels[j][i], fetch(state.worker_output[j][i])) - ) + t = @filtered_async put!(state.channels[j][i], fetch(state.worker_output[j][i])) push!(state.tasks[j], t) end end @@ -854,9 +909,9 @@ function _main_search_loop!( resource_monitor = ResourceMonitor(; # Storing n times as many monitoring intervals as populations seems like it will # help get accurate resource estimates: - max_recordings=options.populations * 100 * nout, - start_reporting_at=options.populations * 3 * nout, - window_size=options.populations * 2 * nout, + max_recordings=(options.populations * 100 * nout), + start_reporting_at=(options.populations * 3 * nout), + window_size=(options.populations * 2 * nout), ) while sum(state.cycles_remaining) > 0 kappa += 1 @@ -932,6 +987,13 @@ function _main_search_loop!( if options.hof_migration && length(dominating) > 0 migrate!(dominating => 
cur_pop, options; frac=options.fraction_replaced_hof) end + if !isempty(state.seed_members[j]) + migrate!( + state.seed_members[j] => cur_pop, + options; + frac=options.fraction_replaced_guesses, + ) + end ################################################################### state.cycles_remaining[j] -= 1 @@ -972,8 +1034,8 @@ function _main_search_loop!( worker_idx = worker_idx ) if ropt.parallelism in (:multiprocessing, :multithreading) - state.tasks[j][i] = Base.errormonitor( - @async put!(state.channels[j][i], fetch(state.worker_output[j][i])) + state.tasks[j][i] = @filtered_async put!( + state.channels[j][i], fetch(state.worker_output[j][i]) ) end @@ -1066,6 +1128,7 @@ function _tear_down!( close_reader!(state.stdin_reader) # Safely close all processes or threads if ropt.parallelism == :multiprocessing + # TODO: We should unwrap the error monitors here state.we_created_procs && rmprocs(state.procs) elseif ropt.parallelism == :multithreading nout = length(state.worker_output) @@ -1145,9 +1208,20 @@ function _info_dump( ropt::AbstractRuntimeOptions, options::AbstractOptions, ) where {D<:Dataset} + nout = length(state.halls_of_fame) + + # Ensure files are saved even when niterations=0, regardless of verbosity + if options.save_to_file + for j in 1:nout + hall_of_fame = state.halls_of_fame[j] + dataset = datasets[j] + dominating = calculate_pareto_frontier(hall_of_fame) + save_to_file(dominating, nout, j, dataset, options, ropt) + end + end + ropt.verbosity <= 0 && return nothing - nout = length(state.halls_of_fame) if nout > 1 @info "Final populations:" else @@ -1186,7 +1260,11 @@ end include("MLJInterface.jl") using .MLJInterfaceModule: - SRRegressor, MultitargetSRRegressor, SRTestRegressor, MultitargetSRTestRegressor + get_options, + SRRegressor, + MultitargetSRRegressor, + SRTestRegressor, + MultitargetSRTestRegressor # Hack to get static analysis to work from within tests: @ignore include("../test/runtests.jl") diff --git a/src/TemplateExpression.jl 
b/src/TemplateExpression.jl index 6d215c869..473ca1e67 100644 --- a/src/TemplateExpression.jl +++ b/src/TemplateExpression.jl @@ -321,29 +321,31 @@ struct TemplateExpression{ end function TemplateExpression( - trees::NamedTuple{<:Any,<:NTuple{<:Any,<:AbstractExpression}}; + trees::NamedTuple{<:Any,<:NTuple{<:Any,<:AbstractExpression{T}}}; structure::TemplateStructure, operators::Union{AbstractOperatorEnum,Nothing}=nothing, variable_names::Union{AbstractVector{<:AbstractString},Nothing}=nothing, parameters::Union{NamedTuple,Nothing}=nothing, -) +) where {T} example_tree = first(values(trees))::AbstractExpression operators = get_operators(example_tree, operators) variable_names = get_variable_names(example_tree, variable_names) - parameters = if has_params(structure) - @assert( - parameters !== nothing, - "Expected `parameters` to be provided for `structure.num_parameters=$(structure.num_parameters)`" - ) + final_parameters = if has_params(structure) + resolved_parameters = @something parameters begin + # Auto-initialize parameters to zeros when not provided + NamedTuple{keys(structure.num_parameters)}( + map(Base.Fix1(zeros, T), values(structure.num_parameters)) + ) + end for k in keys(structure.num_parameters) @assert( - length(parameters[k]) == structure.num_parameters[k], - "Expected `parameters.$k` to have length $(structure.num_parameters[k]), got $(length(parameters[k]))" + length(resolved_parameters[k]) == structure.num_parameters[k], + "Expected `parameters.$k` to have length $(structure.num_parameters[k]), got $(length(resolved_parameters[k]))" ) end # TODO: Delete this extra check once we are confident that it works NamedTuple{keys(structure.num_parameters)}( - map(p -> p isa ParamVector ? p : ParamVector(p::Vector), parameters) + map(p -> p isa ParamVector ? 
p : ParamVector(p::Vector), resolved_parameters), ) else @assert( @@ -352,7 +354,7 @@ function TemplateExpression( ) NamedTuple() end - metadata = (; structure, operators, variable_names, parameters) + metadata = (; structure, operators, variable_names, parameters=final_parameters) return TemplateExpression(trees, Metadata(metadata)) end @@ -501,7 +503,7 @@ function EB.create_expression( dataset::Dataset{T,L}, ::Type{<:AbstractExpressionNode}, ::Type{E}, - ::Val{embed}=Val(false), + (::Val{embed})=Val(false), ) where {T,L,embed,E<:TemplateExpression} function_keys = get_function_keys(options.expression_options.structure) @@ -629,6 +631,52 @@ function HOF.make_prefix(::TemplateExpression, ::AbstractOptions, ::Dataset) return "" end +struct TemplateReturnError <: Exception end + +function Base.showerror(io::IO, ::TemplateReturnError) + return print( + io, + """ +TemplateReturnError: Template expression returned a regular Vector, but ValidVector is required. + +Template expressions must return ValidVector for proper handling: + + ```julia + return ValidVector(my_data, computation_is_valid) + ``` + +The .valid field is used to track whether any upstream computation failed. +It's important to handle this correctly. + +Example of manually propagating validity: + + ```julia + _f_result = f(x1, x2) # Returns ValidVector + _g_result = g(x3) # Returns ValidVector + + # Combine results manually and propagate validity + combined_data = _f_result.x .+ _g_result.x + return ValidVector(combined_data, _f_result.valid && _g_result.valid) + ``` + +Note that normally we could simply write `_f_result + _g_result`, +and this would automatically handle the validity and vectorization. +""", + ) +end + +function _match_input_eltype( + ::Type{<:AbstractMatrix{T1}}, result::AbstractVector{T2} +) where {T1,T2} + if T1 != T2 && T1 <: AbstractFloat && T2 <: AbstractFloat + # Just to handle cases where the user might write + # 0.5 in their template spec, but the data is Float32. 
+ return Base.Fix1(convert, T1).(result) + else + return result + end +end + @stable( default_mode = "disable", default_union_limit = 2, @@ -655,7 +703,11 @@ end extra_args..., map(x -> ValidVector(copy(x), true), eachrow(cX)), ) - return result.x, result.valid + # Validate that template expression returned a ValidVector + if !(result isa ValidVector) + throw(TemplateReturnError()) + end + return _match_input_eltype(typeof(cX), result.x), result.valid end function (ex::TemplateExpression)( X, operators::Union{AbstractOperatorEnum,Nothing}=nothing; kws... @@ -692,6 +744,7 @@ function MM.condition_mutation_weights!( @nospecialize(member::P), @nospecialize(options::AbstractOptions), curmaxsize::Int, + nfeatures::Int, ) where {T,L,N<:TemplateExpression,P<:AbstractPopMember{T,L,N}} if !preserve_sharing(typeof(member.tree)) weights.form_connection = 0.0 @@ -700,6 +753,11 @@ function MM.condition_mutation_weights!( MM.condition_mutate_constant!(typeof(member.tree), weights, member, options, curmaxsize) + # Disable feature mutation if only one feature available + if nfeatures <= 1 + weights.mutate_feature = 0.0 + end + complexity = ComplexityModule.compute_complexity(member, options) if complexity >= curmaxsize @@ -761,6 +819,12 @@ function MF.with_contents_for_mutation( ) return with_contents(ex, new_contents) end + +"""We only want to mutate to a valid number of features.""" +function MF.get_nfeatures_for_mutation(ex::TemplateExpression, ctx::Symbol, _::Int) + return get_metadata(ex).structure.num_features[ctx] +end + function MM.condition_mutate_constant!( ::Type{<:TemplateExpression}, weights::AbstractMutationWeights, @@ -836,11 +900,18 @@ function MF.mutate_constant( end # TODO: Look at other ParametricExpression behavior -function CO.count_constants_for_optimization(ex::TemplateExpression) - return ( - sum(CO.count_constants_for_optimization, values(get_contents(ex))) + - (has_params(ex) ? 
sum(values(get_metadata(ex).structure.num_parameters)) : 0) - ) +for f in (:(DE.count_scalar_constants), :(CO.count_constants_for_optimization)) + @eval function $f(ex::TemplateExpression) + return ( + sum($f, values(get_contents(ex))) + + (has_params(ex) ? sum($f, values(get_metadata(ex).parameters)) : 0) + ) + end + @eval function $f(p::ParamVector) + # TODO: This is not general enough; we should be using `get_scalar_constants` + # on the parameters themselves. + return length(p._data) + end end function CC.check_constraints( @@ -907,4 +978,115 @@ ES.get_expression_options(spec::TemplateExpressionSpec) = (; structure=spec.stru ES.get_node_type(::TemplateExpressionSpec) = Node # COV_EXCL_STOP +IDE.require_copy_to_workers(::Type{<:TemplateExpression}) = true # COV_EXCL_LINE +function IDE.make_example_inputs( + ::Type{<:TemplateExpression}, ::Type{T}, options, dataset +) where {T} + ex = EB.create_expression(CM.init_value(T), options, dataset) + raw_contents = get_contents(ex) + extra_args = has_params(ex) ? (get_metadata(ex).parameters,) : () + return (; + ops=(get_metadata(ex).structure.combine,), + example_inputs=( + raw_contents, + extra_args..., + map(x -> ValidVector(copy(x), true), eachrow(dataset.X)), + ), + ) +end + +""" + parse_expression(ex::NamedTuple; kws...) + +Extension of `parse_expression` to handle NamedTuple input for creating template expressions. +Each key in the NamedTuple should map to a string expression using #N placeholder syntax. 
+ +# Example +```julia +# With expression_spec (recommended for template expressions): +spec = TemplateExpressionSpec(; structure=TemplateStructure{(:f, :g)}(...)) +parse_expression((; f="cos(#1) - 1.5", g="exp(#2) - #1"); expression_spec=spec, operators=operators, variable_names=["x1", "x2"]) + +# Or with explicit parameters: +parse_expression((; f="cos(#1) - 1.5", g="exp(#2) - #1"); expression_type=TemplateExpression, operators=operators, variable_names=["x1", "x2"]) +``` +""" +@unstable function DE.parse_expression( + ex::NamedTuple; + expression_spec::Union{ES.AbstractExpressionSpec,Nothing}=nothing, + expression_options::Union{NamedTuple,Nothing}=nothing, + eval_options::Union{EvalOptions,Nothing}=nothing, + operators::Union{AbstractOperatorEnum,Nothing}=nothing, + binary_operators::Union{Vector{<:Function},Nothing}=nothing, + unary_operators::Union{Vector{<:Function},Nothing}=nothing, + variable_names::Union{AbstractVector,Nothing}=nothing, + expression_type::Union{Type,Nothing}=nothing, + node_type::Union{Type,Nothing}=nothing, + kws..., +) + if expression_spec !== nothing + actual_expression_type = ES.get_expression_type(expression_spec) + actual_expression_options = ES.get_expression_options(expression_spec) + actual_node_type = ES.get_node_type(expression_spec) + else + actual_expression_type = something(expression_type, TemplateExpression) + actual_expression_options = expression_options + actual_node_type = something(node_type, Node) + end + + # COV_EXCL_START + @assert actual_expression_type <: TemplateExpression + @assert( + actual_expression_options !== nothing && + actual_expression_options.structure isa TemplateStructure, + "NamedTuple expressions require expression_options with a TemplateStructure" + ) + # COV_EXCL_STOP + + eval_options_kws = if eval_options !== nothing + (; eval_options) + else + NamedTuple() + end + + inner_expressions = NamedTuple{keys(ex)}( + map(values(ex)) do expr_str + max_var_index = 0 + for m in eachmatch(r"#(\d+)", 
expr_str) + capture = m.captures[1] + if capture !== nothing + var_idx = parse(Int, capture) + max_var_index = max(max_var_index, var_idx) + end + end + + placeholder_variable_names = ["__arg_$i" for i in 1:max_var_index] + expr_str = replace(expr_str, r"#(\d+)" => s"__arg_\1") + + parsed_expr = DE.parse_expression( + expr_str; + operators, + binary_operators, + unary_operators, + variable_names=placeholder_variable_names, + expression_type=DE.Expression, + node_type=actual_node_type, + kws..., + ) + + ComposableExpression( + parsed_expr.tree; operators, variable_names=nothing, eval_options_kws... + ) + end, + ) + + return actual_expression_type( + inner_expressions; + structure=actual_expression_options.structure, + operators, + variable_names=nothing, + kws..., + ) +end + end diff --git a/src/TemplateExpressionMacro.jl b/src/TemplateExpressionMacro.jl index b8b195ae2..0c02db1d9 100644 --- a/src/TemplateExpressionMacro.jl +++ b/src/TemplateExpressionMacro.jl @@ -2,15 +2,34 @@ module TemplateExpressionMacroModule """ @template_spec( - parameters=(p1=10, p2=10, p3=1), - expressions=(f, g), - ) do x1, x2, class - return p1[class] * g(x1^2) + f(x1, x2, p2[class]) - p3[1], + expressions=(f, g, ...), + [parameters=(p1=size1, p2=size2, ...)], + [num_features=(f=n1, g=n2, ...)] + ) do x1, x2, ... + # template function body end -(Experimental) Creates a TemplateExpressionSpec with the given parameters and expressions. -The parameters are used to define constants that can be indexed, and the -expressions define the function keys for the template structure. +Creates a TemplateExpressionSpec with a custom template structure for symbolic regression. + +This macro allows defining structured symbolic expressions with constrained composition +of sub-expressions and parameterized components. + +# Arguments +- `expressions`: A tuple of function names that will be composed in the template. +- `parameters`: Optional. A named tuple of parameter name-size pairs. 
These parameters + can be indexed and accessed in the template function. +- `num_features`: Optional. A named tuple specifying how many features each expression function can access. + Normally this will be inferred automatically from the template function. + +# Example +```julia +expr_spec = @template_spec( + parameters=(p1=10, p2=10, p3=1), + expressions=(f, g), +) do x1, x2, class + return p1[class] * g(x1^2) + f(x1, x2, p2[class]) - p3[1] +end +``` """ macro template_spec(f, args...) return esc(template_spec(f, args...)) @@ -20,6 +39,7 @@ function template_spec(func, args...) # Extract the parameters and expressions from the arguments parameters = nothing expressions = nothing + num_features = nothing for arg in args if Meta.isexpr(arg, :(=)) @@ -30,6 +50,9 @@ function template_spec(func, args...) elseif name == :expressions !isnothing(expressions) && error("cannot set `expressions` keyword twice") expressions = value + elseif name == :num_features + !isnothing(num_features) && error("cannot set `num_features` keyword twice") + num_features = value else error("unrecognized keyword $(name)") end @@ -85,14 +108,20 @@ function template_spec(func, args...) func_body = func.args[2] func_args = func_args.args + # For loading from checkpoint, or sharing across workers + function_hash = hash((function_keys, expr_names, func_args, func_body)) + # Create the TemplateStructure with or without parameters if isnothing(parameters) + function_name = Symbol(:__sr_template_, function_hash) + quote TemplateExpressionSpec(; structure=TemplateStructure{($(function_keys...),)}( - function ((; $(expr_names...)), ($(func_args...),)) + function $(function_name)((; $(expr_names...)), ($(func_args...),)) return $(func_body) - end + end; + num_features=($(num_features)), ), ) end @@ -101,15 +130,20 @@ function template_spec(func, args...) 
param_keys = Tuple(QuoteNode(p.args[1]) for p in parameters.args) param_names = [p.args[1] for p in parameters.args] + # For loading from checkpoint, or sharing across workers + function_hash_with_params = hash((param_keys, param_names), function_hash) + function_name = Symbol(:__sr_template_, function_hash_with_params) + quote TemplateExpressionSpec(; structure=TemplateStructure{($(function_keys...),),($(param_keys...),)}( - function ( + function $(function_name)( (; $(expr_names...)), (; $(param_names...)), ($(func_args...),) ) return $(func_body) end; - num_parameters=$(parameters), + num_features=($(num_features)), + num_parameters=($(parameters)), ), ) end diff --git a/src/Utils.jl b/src/Utils.jl index 48c18c677..77a8bd59f 100644 --- a/src/Utils.jl +++ b/src/Utils.jl @@ -4,6 +4,8 @@ module UtilsModule using Printf: @printf using MacroTools: splitdef using StyledStrings: StyledStrings +using Random: AbstractRNG, default_rng +using DispatchDoctor: @unstable macro ignore(args...) end @@ -91,12 +93,14 @@ function _to_vec(v::MutableTuple{S,T}) where {S,T} end """Return the bottom k elements of x, and their indices.""" -bottomk_fast(x::AbstractVector{T}, k) where {T} = Base.Cartesian.@nif( - 32, - d -> d == k, - d -> _bottomk_dispatch(x, Val(d))::Tuple{Vector{T},Vector{Int}}, - _ -> _bottomk_dispatch(x, Val(k))::Tuple{Vector{T},Vector{Int}} -) +function bottomk_fast(x::AbstractVector{T}, k) where {T} + Base.Cartesian.@nif( + 32, + d -> d == k, + d -> _bottomk_dispatch(x, Val(d))::Tuple{Vector{T},Vector{Int}}, + _ -> _bottomk_dispatch(x, Val(k))::Tuple{Vector{T},Vector{Int}} + ) +end function _bottomk_dispatch(x::AbstractVector{T}, ::Val{k}) where {T,k} if k == 1 @@ -142,14 +146,18 @@ function argmin_fast(x::AbstractVector{T}) where {T} return findmin_fast(x)[2] end -function poisson_sample(λ::T) where {T} +function poisson_sample(rng::AbstractRNG, λ::T) where {T} + iszero(λ) && return 0 k, p, L = 0, one(T), exp(-λ) while p > L k += 1 - p *= rand(T) + p *= rand(rng, 
T) end return k - 1 end +function poisson_sample(λ::T) where {T} + return poisson_sample(default_rng(), λ) +end macro threads_if(flag, ex) return quote @@ -203,7 +211,7 @@ json3_write(args...) = error("Please load the JSON3.jl package.") A per-task cache that allows us to avoid repeated locking. """ -mutable struct PerTaskCache{T,F<:Function} +struct PerTaskCache{T,F<:Function} constructor::F PerTaskCache{T}(constructor::F) where {T,F} = new{T,F}(constructor) @@ -284,4 +292,8 @@ function (f::FixKws{F,KWS})(args::Vararg{Any,N}) where {F,KWS,N} return f.f(args...; f.kws...) end +@unstable function stable_get!(f::F, dict, key) where {F} + return get!(f, dict, key)::(Base.promote_op(f)) +end + end diff --git a/test/Project.toml b/test/Project.toml index dd8c2741b..7b9a01e1d 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -7,9 +7,7 @@ DispatchDoctor = "8d63f2c5-f18a-4cf2-ba9d-b3f60fc568c8" Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b" DynamicExpressions = "a40a106e-89c9-4ca8-8020-a735e8728b6b" DynamicQuantities = "06fc5a27-2a28-4c7c-a15d-362465fb6821" -Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" -JET = "c3a54625-cd67-489e-a8e7-0a5a0ff4e31b" JSON3 = "0f8b85d8-7281-11e9-16c2-39a750bddbf1" LineSearches = "d3d80556-e9d4-5f37-9878-2ab0fcc64255" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" @@ -19,13 +17,14 @@ LoopVectorization = "bdcacae8-1622-11e9-2a5c-532679323890" MLJBase = "a7f614a8-145f-11e9-1d2a-a57a1082229d" MLJModelInterface = "e80e1ace-859a-464e-9ed9-23947d8ae3ea" MLJTestInterface = "72560011-54dd-4dc2-94f3-c5de45b75ecd" +Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6" Optim = "429524aa-4258-5aef-a3af-852621145aeb" Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" SafeTestsets = "1bc83da4-3b8d-516f-aca4-4fe02f6d838f" +StableRNGs = "860ef19b-820b-49d6-a774-d7a799459cd3" StyledStrings = "f489334b-da3d-4c2e-b8f0-e476e12c162b" Suppressor = 
"fd094767-a336-5f1f-9728-57cf17d0bbfb" -SymbolicRegression = "8254be44-1295-4e6a-a16d-46603ac705cb" SymbolicUtils = "d1185830-fcd6-423d-90d6-eec64667417b" TensorBoardLogger = "899adc3e-224a-11e9-021f-63837185c80f" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" diff --git a/test/autodiff_helpers.jl b/test/autodiff_helpers.jl new file mode 100644 index 000000000..1ef0e7368 --- /dev/null +++ b/test/autodiff_helpers.jl @@ -0,0 +1,93 @@ +using SymbolicRegression +using SymbolicRegression.ConstantOptimizationModule: + EvaluatorContext, Evaluator, GradEvaluator, specialized_options +using DynamicExpressions +using DifferentiationInterface: value_and_gradient +using Test +using Zygote: Zygote +using DifferentiationInterface: AutoZygote + +""" +Create and set up all the test data needed for parametric expression autodiff tests. +Returns everything needed for both Zygote and Enzyme tests. +""" +function setup_parametric_test(rng) + X = rand(rng, 2, 32) + true_params = [0.5 2.0] + init_params = [0.1 0.2] + init_constants = [2.5, -0.5] + class = rand(rng, 1:2, 32) + y = [ + X[1, i] * X[1, i] - cos(2.6 * X[2, i] - 0.2) + true_params[1, class[i]] for + i in 1:32 + ] + + dataset = Dataset(X, y; extra=(; class)) + + # Calculate the reference values using AutoZygote + (true_val, (true_d_params, true_d_constants)) = + value_and_gradient(AutoZygote(), (init_params, init_constants)) do (params, c) + pred = [ + X[1, i] * X[1, i] - cos(c[1] * X[2, i] + c[2]) + params[1, class[i]] for + i in 1:32 + ] + sum(abs2, pred .- y) / length(y) + end + + return X, + dataset, init_params, init_constants, true_val, true_d_params, + true_d_constants +end + +""" +Create the parametric expression for testing. 
+""" +function create_parametric_expression(init_params, operators) + ex = @parse_expression( + x * x - cos(2.5 * y + -0.5) + p1, + operators = operators, + expression_type = ParametricExpression, + variable_names = ["x", "y"], + extra_metadata = (parameter_names=["p1"], parameters=init_params) + ) + + return ex +end + +""" +Test the autodiff backend against the reference values. +This matches the original test_backend function's behavior. +""" +function test_autodiff_backend( + ex, + dataset, + true_val, + true_d_constants, + true_d_params, + options, + backend; + allow_failure=false, +) + x0, refs = get_scalar_constants(ex) + G = zero(x0) + + ctx = EvaluatorContext(dataset, options) + f = Evaluator(ex, refs, ctx) + fg! = GradEvaluator(f, backend) + + @test f(x0) ≈ true_val + + try + val = fg!(nothing, G, x0) + @test val ≈ true_val + @test G ≈ vcat(true_d_constants[:], true_d_params[:]) + return true + catch e + if allow_failure + @warn "Expected failure" e + return false + else + rethrow(e) + end + end +end diff --git a/test/runtests.jl b/test/runtests.jl index fbebb863f..7aef02fea 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -2,13 +2,18 @@ using TestItems: @testitem using TestItemRunner: @run_package_tests ENV["SYMBOLIC_REGRESSION_TEST"] = "true" -tags_to_run = let t = get(ENV, "SYMBOLIC_REGRESSION_TEST_SUITE", "part1,part2,part3") - t = split(t, ",") - t = map(Symbol, t) - t -end -@eval @run_package_tests filter = ti -> !isdisjoint(ti.tags, $tags_to_run) verbose = true +const SYMBOLIC_REGRESSION_TEST_SUITE = get(ENV, "SYMBOLIC_REGRESSION_TEST_SUITE", "") +const SYMBOLIC_REGRESSION_TEST_NAMES = get(ENV, "SYMBOLIC_REGRESSION_TEST_NAMES", "") +tags_to_run = map(Symbol, filter(!isempty, split(SYMBOLIC_REGRESSION_TEST_SUITE, ","))) +names_to_run = filter(!isempty, split(SYMBOLIC_REGRESSION_TEST_NAMES, ",")) +test_filter = if !isempty(names_to_run) + ti -> any(name -> occursin(name, ti.name), names_to_run) +else + tags_to_run = isempty(tags_to_run) ? 
[:part1, :part2, :part3] : tags_to_run + ti -> !isdisjoint(ti.tags, tags_to_run) +end +@run_package_tests(filter = test_filter, verbose = true) # TODO: This is a very slow test include("test_operators.jl") @@ -76,10 +81,7 @@ end end include("test_complexity.jl") - -@testitem "Test options" tags = [:part1] begin - include("test_options.jl") -end +include("test_options.jl") @testitem "Test hash of tree" tags = [:part2] begin include("test_hash.jl") @@ -129,6 +131,11 @@ end include("../examples/parameterized_function.jl") end +@testitem "Running custom types example." tags = [:part3] begin + ENV["SYMBOLIC_REGRESSION_IS_TESTING"] = "true" + include("../examples/custom_types.jl") +end + @testitem "Testing whether the recorder works." tags = [:part3] begin include("test_recorder.jl") end @@ -137,15 +144,12 @@ end include("test_deterministic.jl") end -@testitem "Testing whether early stop criteria works." tags = [:part2] begin - include("test_early_stop.jl") -end +include("test_early_stop.jl") include("test_mlj.jl") -@testitem "Testing whether we can move operators to workers." tags = [:part1] begin - include("test_custom_operators_multiprocessing.jl") -end +include("test_custom_operators_multiprocessing.jl") +include("test_filtered_async.jl") @testitem "Testing whether we can move loss function expression to workers." 
tags = [:part2] begin include("test_loss_function_expression_multiprocessing.jl") @@ -159,24 +163,25 @@ end include("test_custom_objectives.jl") end -@testitem "Test abstract numbers" tags = [:part1] begin - include("test_abstract_numbers.jl") -end +include("test_abstract_numbers.jl") include("test_logging.jl") include("test_pretty_printing.jl") include("test_expression_builder.jl") +include("test_guesses.jl") include("test_composable_expression.jl") include("test_parametric_template_expressions.jl") +include("test_template_expression.jl") include("test_template_macro.jl") include("test_template_expression_mutation.jl") include("test_template_expression_string.jl") +include("test_loss_scale.jl") @testitem "Aqua tests" tags = [:part2, :aqua] begin include("test_aqua.jl") end -@testitem "JET tests" tags = [:part1, :jet] begin +@testitem "JET tests" tags = [:jet] begin test_jet_file = joinpath((@__DIR__), "test_jet.jl") run(`$(Base.julia_cmd()) --startup-file=no $test_jet_file`) end diff --git a/test/test_abstract_numbers.jl b/test/test_abstract_numbers.jl index f127995ae..1cb3f51d1 100644 --- a/test/test_abstract_numbers.jl +++ b/test/test_abstract_numbers.jl @@ -1,37 +1,56 @@ -using SymbolicRegression -using Random -include("test_params.jl") - -get_base_type(::Type{<:Complex{BT}}) where {BT} = BT -early_stop(loss::L, c) where {L} = ((loss <= L(1e-2)) && (c <= 15)) -example_loss(prediction, target) = abs2(prediction - target) - -options = SymbolicRegression.Options(; - binary_operators=[+, *, -, /], - unary_operators=[cos], - populations=20, - early_stop_condition=early_stop, - elementwise_loss=example_loss, -) - -for T in (ComplexF16, ComplexF32, ComplexF64) - L = get_base_type(T) - @testset "Test search with $T type" begin - X = randn(MersenneTwister(0), T, 1, 100) - y = @. 
(2 - 0.5im) * cos((1 + 1im) * X[1, :]) |> T - - dataset = Dataset(X, y, L) - hof = if T == ComplexF16 - equation_search([dataset]; options=options, niterations=1_000_000_000) - else - # Should automatically find correct type: - equation_search(X, y; options=options, niterations=1_000_000_000) - end +@testitem "Abstract numbers" tags = [:part1] begin + using SymbolicRegression + using Random + include("test_params.jl") + + get_base_type(::Type{<:Complex{BT}}) where {BT} = BT + early_stop(loss::L, c) where {L} = ((loss <= L(1e-2)) && (c <= 15)) + example_loss(prediction, target) = abs2(prediction - target) + + options = SymbolicRegression.Options(; + binary_operators=[+, *, -, /], + unary_operators=[cos], + populations=20, + early_stop_condition=early_stop, + elementwise_loss=example_loss, + ) + + for T in (ComplexF16, ComplexF32, ComplexF64) + L = get_base_type(T) + @testset "Test search with $T type" begin + X = randn(MersenneTwister(0), T, 1, 100) + y = @. (2 - 0.5im) * cos((1 + 1im) * X[1, :]) |> T + + dataset = Dataset(X, y, L) + hof = if T == ComplexF16 + equation_search([dataset]; options=options, niterations=1_000_000_000) + else + # Should automatically find correct type: + equation_search(X, y; options=options, niterations=1_000_000_000) + end - dominating = calculate_pareto_frontier(hof) - @test typeof(dominating[end].loss) == L - output, _ = eval_tree_array(dominating[end].tree, X, options) - @test typeof(output) <: AbstractArray{T} - @test sum(abs2, output .- y) / length(output) <= L(1e-2) + dominating = calculate_pareto_frontier(hof) + @test typeof(dominating[end].loss) == L + output, _ = eval_tree_array(dominating[end].tree, X, options) + @test typeof(output) <: AbstractArray{T} + @test sum(abs2, output .- y) / length(output) <= L(1e-2) + end end end + +@testitem "Testing error handling in InterfaceDataTypesModule" tags = [:part1] begin + using SymbolicRegression: init_value, sample_value, mutate_value + using Random + + struct CustomTestType end + + 
rng = Random.MersenneTwister(0) + options = Options() + @test_throws "No `init_value` method defined for type" init_value(CustomTestType) + @test_throws "No `sample_value` method defined for type" sample_value( + rng, CustomTestType, options + ) + @test_throws "No `mutate_value` method defined for type" mutate_value( + rng, CustomTestType(), 0.5, options + ) +end diff --git a/test/test_composable_expression.jl b/test/test_composable_expression.jl index 21b48f6ec..a67a8a03c 100644 --- a/test/test_composable_expression.jl +++ b/test/test_composable_expression.jl @@ -129,3 +129,89 @@ end x2_val = ValidVector([1.0, 2.0], false) @test ex(x1_val, x2_val).valid == false end + +@testitem "ValidVector helpful error messages" tags = [:part2] begin + using SymbolicRegression + using SymbolicRegression: ValidVector, ValidVectorMixError, ValidVectorAccessError + + vv = ValidVector([1.0, 2.0], true) + v = [3.0, 4.0] + + # Helper function to get error message + get_error_msg(err) = + let io = IOBuffer() + Base.showerror(io, err) + String(take!(io)) + end + + # Test vector arithmetic errors encourage ValidVector wrapping + err_mix = @test_throws ValidVectorMixError vv + v + @test_throws ValidVectorMixError v * vv # Test other direction too + + mix_msg = get_error_msg(err_mix.value) + @test contains( + mix_msg, + "ValidVector handles validity checks, auto-vectorization, and batching in template expressions", + ) + + # Test array access errors mention .x and .valid + err_access = @test_throws ValidVectorAccessError vv[1] + @test_throws ValidVectorAccessError length(vv) + @test_throws ValidVectorAccessError push!(vv, 5.0) + + access_msg = get_error_msg(err_access.value) + @test contains(access_msg, "valid_ar.x[1]") + @test contains(access_msg, "valid_ar.valid") + @test contains(access_msg, "length(valid_ar.x)") + @test contains(access_msg, "doesn't support direct array operations") + @test contains(access_msg, "ValidVector handles validity/batching automatically") +end + +@testitem 
"Test Number inputs" tags = [:part2] begin + using SymbolicRegression: ComposableExpression, Node, ValidVector + using DynamicExpressions: OperatorEnum + + operators = OperatorEnum(; binary_operators=(+, *)) + x1 = ComposableExpression(Node{Float64}(; feature=1); operators) + x2 = ComposableExpression(Node{Float64}(; feature=2); operators) + ex = x1 + x2 + + @test ex(2.0, 3.0) ≈ 5.0 + @test isnan(ex(NaN, 3.0)) + @test ex(ValidVector([1.0], true), 2.0).x ≈ [3.0] + @test ex(ValidVector([1.0, 1.0], true), 2.0).x ≈ [3.0, 3.0] + @test ex(ValidVector([1.0, 1.0], false), 2.0).valid == false +end + +@testitem "ValidVector operations with Union{} return type" tags = [:part2] begin + using SymbolicRegression: ValidVector + using SymbolicRegression.ComposableExpressionModule: apply_operator, _match_eltype + + error_op(::Any, ::Any) = error("This should cause Union{} inference") + + x = ValidVector([1.0, 2.0], false) + y = ValidVector([3.0, 4.0], false) + + result = apply_operator(error_op, x, y) + @test result isa ValidVector + @test !result.valid + @test result.x == [1.0, 2.0] + + a = ValidVector(Float32[1.0, 2.0], false) + b = 1.0 + result2 = apply_operator(*, a, b) + @test result2 isa ValidVector{<:AbstractArray{Float64}} + + # Test apply_operator when all inputs are valid + valid_x = ValidVector([1.0, 2.0], true) + valid_y = ValidVector([3.0, 4.0], true) + valid_result = apply_operator(+, valid_x, valid_y) + @test valid_result.valid == true + @test valid_result.x ≈ [4.0, 6.0] + + # cover _match_eltype + arr = [1.0, 2.0] + @test _match_eltype(ValidVector{Vector{Float64}}, arr) === arr # Same type + arr_f32 = Float32[1.0, 2.0] + @test _match_eltype(ValidVector{Vector{Float64}}, arr_f32) isa Vector{Float64} # Different type +end diff --git a/test/test_constraints.jl b/test/test_constraints.jl index 0a6306923..38de8bb6c 100644 --- a/test/test_constraints.jl +++ b/test/test_constraints.jl @@ -18,6 +18,13 @@ violating_tree = Node(1, tree) @test check_constraints(tree, options) 
== true @test check_constraints(violating_tree, options) == false +# https://github.com/MilesCranmer/PySR/issues/896 +x1 = Node(; feature=1) +complex_expr = x1 * x1 * x1 * x1 * x1 # Complex expression with complexity > 4 +inv_complex = Node(1, complex_expr) # _inv applied to complex expression +binary_with_nested_inv = Node(5, x1, inv_complex) # _inv nested inside binary op, not at root +@test check_constraints(binary_with_nested_inv, options) == false + # Test complexity constraints: options = Options(; binary_operators=(+, *), maxsize=5) @extend_operators options @@ -55,3 +62,14 @@ tree = cos(cos(x1)) tree = cos(cos(cos(x1))) @test count_depth(tree) == 4 @test check_constraints(tree, options) == false + +_inv2(x) = 1 / x +options = Options(; + binary_operators=(+, *), unary_operators=(_inv2,), constraints=(_inv2 => 5,), maxsize=10 +) + +x1 = Node(; feature=1) +deep_expr = x1 * x1 * x1 * x1 * x1 * x1 +violating_tree = Node(1, deep_expr) + +@test check_constraints(violating_tree, options) == false diff --git a/test/test_custom_objectives.jl b/test/test_custom_objectives.jl index 3696f1750..694b38321 100644 --- a/test/test_custom_objectives.jl +++ b/test/test_custom_objectives.jl @@ -1,31 +1,31 @@ using SymbolicRegression +using SymbolicRegression: OperatorEnum, string_tree include("test_params.jl") def = quote + _ifelse_ternary(a, b, c) = a > 0 ? 
b : c + function my_custom_loss( tree::$(AbstractExpressionNode){T}, dataset::$(Dataset){T}, options::$(Options) ) where {T} - # We multiply the tree by 2.0: - tree = $(Node)(1, tree, $(Node)(T; val=2.0)) out, completed = $(eval_tree_array)(tree, dataset.X, options) if !completed return T(Inf) end - return sum(abs, out .- dataset.y) + return sum(abs, (out .* T(0.5)) .- dataset.y) end end # TODO: Required for workers as they assume the function is defined in the Main module if (@__MODULE__) != Core.Main Core.eval(Core.Main, def) - eval(:(using Main: my_custom_loss)) + eval(:(using Main: my_custom_loss, _ifelse_ternary)) else eval(def) end options = Options(; - binary_operators=[*, /, +, -], - unary_operators=[cos, sin], + operators=OperatorEnum(1 => (cos, sin), 2 => (*, /, +, -), 3 => (_ifelse_ternary,)), loss_function=my_custom_loss, elementwise_loss=nothing, maxsize=10, @@ -36,17 +36,19 @@ options = Options(; @test options.should_simplify == false -X = rand(2, 100) .* 10 -y = X[1, :] .+ X[2, :] +X = rand(3, 100) .* 10 .- 5 +y = _ifelse_ternary.(X[1, :], X[2, :], X[3, :]) # y = x1 > 0 ? x2 : x3 -# The best tree should be 0.5 * (x1 + x2), since the custom loss function -# multiplies the tree by 2.0. +# The best tree should be 2.0 * _ifelse_ternary(x1, x2, x3), since the custom loss function +# scales the tree output by 0.5. 
-hall_of_fame = equation_search( - X, y; niterations=100, options=options, parallelism=:multiprocessing, numprocs=1 -) +hall_of_fame = equation_search(X, y; niterations=100, options=options, parallelism=:serial) dominating = calculate_pareto_frontier(hall_of_fame) -testX = rand(2, 100) .* 10 -expected_y = 0.5 .* (testX[1, :] .+ testX[2, :]) +testX = rand(3, 100) .* 10 .- 5 # Range from -5 to 5 +expected_y = 2.0 .* _ifelse_ternary.(testX[1, :], testX[2, :], testX[3, :]) @test eval_tree_array(dominating[end].tree, testX, options)[1] ≈ expected_y atol = 1e-5 + +# Also verify that the tree actually uses the ternary operator +tree_string = string_tree(dominating[end].tree, options) +@test occursin("_ifelse_ternary", tree_string) diff --git a/test/test_custom_operators_multiprocessing.jl b/test/test_custom_operators_multiprocessing.jl index a8a1e3ef5..87b1f7ec5 100644 --- a/test/test_custom_operators_multiprocessing.jl +++ b/test/test_custom_operators_multiprocessing.jl @@ -1,55 +1,57 @@ -using SymbolicRegression -using Test +@testitem "Testing whether we can move operators to workers." tags = [:part1] begin + using SymbolicRegression + using Test -defs = quote - _plus(x, y) = x + y - _mult(x, y) = x * y - _div(x, y) = x / y - _min(x, y) = x - y - _cos(x) = cos(x) - _exp(x) = exp(x) - early_stop(loss, c) = ((loss <= 1e-10) && (c <= 6)) - my_loss(x, y, w) = abs(x - y)^2 * w - my_complexity(ex) = ceil(Int, length($(get_tree)(ex)) / 2) -end + defs = quote + _plus(x, y) = x + y + _mult(x, y) = x * y + _div(x, y) = x / y + _min(x, y) = x - y + _cos(x) = cos(x) + _exp(x) = exp(x) + early_stop(loss, c) = ((loss <= 1e-10) && (c <= 6)) + my_loss(x, y, w) = abs(x - y)^2 * w + my_complexity(ex) = ceil(Int, length($(get_tree)(ex)) / 2) + end -# This is needed as workers are initialized in `Core.Main`! 
-if (@__MODULE__) != Core.Main - Core.eval(Core.Main, defs) - eval( - :(using Main: - _plus, _mult, _div, _min, _cos, _exp, early_stop, my_loss, my_complexity), - ) -else - eval(defs) -end + # This is needed as workers are initialized in `Core.Main`! + if (@__MODULE__) != Core.Main + Core.eval(Core.Main, defs) + eval( + :(using Main: + _plus, _mult, _div, _min, _cos, _exp, early_stop, my_loss, my_complexity), + ) + else + eval(defs) + end -X = randn(Float32, 5, 100) -y = _mult.(2, _cos.(X[4, :])) + _mult.(X[1, :], X[1, :]) + X = randn(Float32, 5, 100) + y = _mult.(2, _cos.(X[4, :])) + _mult.(X[1, :], X[1, :]) -options = SymbolicRegression.Options(; - binary_operators=(_plus, _mult, _div, _min), - unary_operators=(_cos, _exp), - populations=20, - maxsize=15, - early_stop_condition=early_stop, - elementwise_loss=my_loss, - complexity_mapping=my_complexity, - batching=true, - batch_size=50, -) + options = SymbolicRegression.Options(; + binary_operators=(_plus, _mult, _div, _min), + unary_operators=(_cos, _exp), + populations=20, + maxsize=15, + early_stop_condition=early_stop, + elementwise_loss=my_loss, + complexity_mapping=my_complexity, + batching=true, + batch_size=50, + ) -hof = equation_search( - X, - y; - weights=ones(Float32, 100), - options=options, - niterations=1_000_000_000, - numprocs=2, - parallelism=:multiprocessing, -) + hof = equation_search( + X, + y; + weights=ones(Float32, 100), + options=options, + niterations=1_000_000_000, + numprocs=2, + parallelism=:multiprocessing, + ) -@test any( - early_stop(member.loss, my_complexity(member.tree)) for - member in hof.members[hof.exists] -) + @test any( + early_stop(member.loss, my_complexity(member.tree)) for + member in hof.members[hof.exists] + ) +end diff --git a/test/test_derivatives.jl b/test/test_derivatives.jl index 9615a9277..f670ca672 100644 --- a/test/test_derivatives.jl +++ b/test/test_derivatives.jl @@ -132,7 +132,9 @@ tree = equation3(nx1, nx2, nx3) """Check whether the ordering of 
constant_list is the same as the ordering of node_index.""" function check_tree( - tree::AbstractExpressionNode, node_index::NodeIndex, constant_list::AbstractVector + tree::AbstractExpressionNode{<:Any,2}, + node_index::NodeIndex, + constant_list::AbstractVector, ) if tree.degree == 0 (!tree.constant) || tree.val == constant_list[node_index.val::UInt16] diff --git a/test/test_early_stop.jl b/test/test_early_stop.jl index 3ba36e555..14f9a1cb9 100644 --- a/test/test_early_stop.jl +++ b/test/test_early_stop.jl @@ -1,19 +1,69 @@ -using SymbolicRegression +@testitem "Early stop condition" tags = [:part2] begin + using SymbolicRegression -X = randn(Float32, 5, 100) -y = 2 * cos.(X[4, :]) + X[1, :] .^ 2 + X = randn(Float32, 5, 100) + y = 2 * cos.(X[4, :]) + X[1, :] .^ 2 -early_stop(loss, c) = ((loss <= 1e-10) && (c <= 10)) + early_stop(loss, c) = ((loss <= 1e-10) && (c <= 10)) -options = SymbolicRegression.Options(; - binary_operators=(+, *, /, -), - unary_operators=(cos, exp), - populations=20, - early_stop_condition=early_stop, -) + options = SymbolicRegression.Options(; + binary_operators=(+, *, /, -), + unary_operators=(cos, exp), + populations=20, + early_stop_condition=early_stop, + ) -hof = equation_search(X, y; options=options, niterations=1_000_000_000) + hof = equation_search(X, y; options=options, niterations=1_000_000_000) -@test any( - early_stop(member.loss, count_nodes(member.tree)) for member in hof.members[hof.exists] -) + @test any( + early_stop(member.loss, count_nodes(member.tree)) for + member in hof.members[hof.exists] + ) +end + +@testitem "State preservation with niterations=0" tags = [:part2] begin + using SymbolicRegression + using Random + + # Regression test for https://github.com/MilesCranmer/SymbolicRegression.jl/issues/178 + + rng = MersenneTwister(42) + X = randn(rng, 2, 10) + y = X[1, :] .+ X[2, :] + + options = Options(; + binary_operators=(+,), + unary_operators=(), + verbosity=0, + progress=false, + population_size=5, + populations=2, 
+ maxsize=5, + tournament_selection_n=2, + ) + + # Manually create saved state + dataset = Dataset(X, y) + pop1 = Population(dataset; population_size=5, nlength=3, options=options, nfeatures=2) + pop2 = Population(dataset; population_size=5, nlength=3, options=options, nfeatures=2) + hof = HallOfFame(options, dataset) + + saved_pops = [[pop1, pop2]] + saved_hof = [hof] + saved_state = (saved_pops, saved_hof) + + # Run with niterations=0 - should preserve populations + result_pops, result_hof = equation_search( + X, + y; + niterations=0, + saved_state=saved_state, + options=options, + parallelism=:serial, + return_state=true, + ) + + # Verify populations are preserved (not reset to size 1) + @test length(result_pops[1]) == 2 + @test all(pop -> length(pop.members) == 5, result_pops[1]) +end diff --git a/test/test_expression_builder.jl b/test/test_expression_builder.jl index ce39b6a1d..f57fe79f6 100644 --- a/test/test_expression_builder.jl +++ b/test/test_expression_builder.jl @@ -1,4 +1,57 @@ # This file tests particular functionality of ExpressionBuilderModule +@testitem "NamedTuple support in parse_expression" tags = [:part3] begin + using SymbolicRegression + using DynamicExpressions + + # Test basic NamedTuple parsing for template expressions + operators = OperatorEnum(; binary_operators=[+, -, *, /], unary_operators=[cos, sin]) + variable_names = ["x1", "x2"] + + # Create a simple template using @template_spec macro + template = @template_spec(expressions = (f, g)) do x1, x2 + f(x1, x2) * g(x1, x2) # Simple multiplication combination + end + options = Options(; operators, expression_spec=template) + + # Test NamedTuple parsing with expression_options using #N placeholder syntax + named_tuple_input = (; f="#1 + 1.0", g="#2 - 0.5") + result = parse_expression( + named_tuple_input; + options.expression_options, + operators, + expression_type=TemplateExpression, + node_type=Node{Float64,2}, + ) + + @test result isa TemplateExpression + @test result.trees.f isa 
ComposableExpression + @test result.trees.g isa ComposableExpression + @test length(result.trees) == 2 + @test keys(result.trees) == (:f, :g) + + # Test NamedTuple parsing with expression_spec + result_with_spec = parse_expression( + named_tuple_input; expression_spec=template, operators, node_type=Node{Float64,2} + ) + + @test result_with_spec isa TemplateExpression + @test typeof(result_with_spec) == typeof(result) + + # Test that different expression strings create different expressions using #N syntax + different_input = (; f="cos(#1)", g="sin(#2)") + different_result = parse_expression( + different_input; + options.expression_options, + operators, + expression_type=TemplateExpression, + node_type=Node{Float64}, + ) + + @test different_result isa TemplateExpression + @test different_result.trees.f isa ComposableExpression + @test different_result.trees.g isa ComposableExpression +end + @testitem "ParametricExpression" tags = [:part3] begin using SymbolicRegression using SymbolicRegression.ExpressionBuilderModule: diff --git a/test/test_expression_derivatives.jl b/test/test_expression_derivatives.jl index 02cf78740..7681f3a3f 100644 --- a/test/test_expression_derivatives.jl +++ b/test/test_expression_derivatives.jl @@ -37,7 +37,8 @@ end @testitem "Test derivatives during optimization" tags = [:part1] begin using SymbolicRegression - using SymbolicRegression.ConstantOptimizationModule: Evaluator, GradEvaluator + using SymbolicRegression.ConstantOptimizationModule: + Evaluator, GradEvaluator, EvaluatorContext using DynamicExpressions using Zygote: Zygote using Random: MersenneTwister @@ -55,7 +56,8 @@ end ex = @parse_expression( x * x - cos(2.5 * y), operators = options.operators, variable_names = [:x, :y] ) - f = Evaluator(ex, last(get_scalar_constants(ex)), dataset, options) + ctx = EvaluatorContext(dataset, options) + f = Evaluator(ex, last(get_scalar_constants(ex)), ctx) fg! 
= GradEvaluator(f, options.autodiff_backend) @test f(first(get_scalar_constants(ex))) isa Float64 @@ -68,75 +70,84 @@ end @testitem "Test derivatives of parametric expression during optimization" tags = [:part3] begin using SymbolicRegression - using SymbolicRegression.ConstantOptimizationModule: - Evaluator, GradEvaluator, optimize_constants, specialized_options + using SymbolicRegression.ConstantOptimizationModule: specialized_options using DynamicExpressions using Zygote: Zygote using Random: MersenneTwister - using DifferentiationInterface: value_and_gradient, AutoZygote, AutoEnzyme - enzyme_compatible = VERSION >= v"1.10.0" && VERSION < v"1.11.0-DEV.0" - @static if enzyme_compatible - using Enzyme: Enzyme - end + using DifferentiationInterface: AutoZygote + + # Import our AutoDiff helpers + include("autodiff_helpers.jl") rng = MersenneTwister(0) - X = rand(rng, 2, 32) - true_params = [0.5 2.0] - init_params = [0.1 0.2] - init_constants = [2.5, -0.5] - class = rand(rng, 1:2, 32) - y = [ - X[1, i] * X[1, i] - cos(2.6 * X[2, i] - 0.2) + true_params[1, class[i]] for - i in 1:32 - ] - - dataset = Dataset(X, y; extra=(; class)) - - (true_val, (true_d_params, true_d_constants)) = - value_and_gradient(AutoZygote(), (init_params, init_constants)) do (params, c) - pred = [ - X[1, i] * X[1, i] - cos(c[1] * X[2, i] + c[2]) + params[1, class[i]] for - i in 1:32 - ] - sum(abs2, pred .- y) / length(y) - end + # Set up test data using our helper + _, dataset, init_params, _, true_val, true_d_params, true_d_constants = setup_parametric_test( + rng + ) + + # Create options and expression options = Options(; unary_operators=[cos], binary_operators=[+, *, -], autodiff_backend=:Zygote ) - ex = @parse_expression( - x * x - cos(2.5 * y + -0.5) + p1, - operators = options.operators, - expression_type = ParametricExpression, - variable_names = ["x", "y"], - extra_metadata = (parameter_names=["p1"], parameters=init_params) + ex = create_parametric_expression(init_params, 
options.operators) + + # Test with Zygote + test_autodiff_backend( + ex, dataset, true_val, true_d_constants, true_d_params, options, AutoZygote() ) +end - function test_backend(ex, @nospecialize(backend); allow_failure=false) - x0, refs = get_scalar_constants(ex) - G = zero(x0) - - f = Evaluator(ex, refs, dataset, specialized_options(options)) - fg! = GradEvaluator(f, backend) - - @test f(x0) ≈ true_val - - try - val = fg!(nothing, G, x0) - @test val ≈ true_val - @test G ≈ vcat(true_d_constants[:], true_d_params[:]) - catch e - if allow_failure - @warn "Expected failure" e - else - rethrow(e) - end - end +@testitem "Test Enzyme derivatives of parametric expression" tags = [:enzyme] begin + using SymbolicRegression + using SymbolicRegression.ConstantOptimizationModule: specialized_options + using DynamicExpressions + using Random: MersenneTwister + using DifferentiationInterface: AutoZygote + + # Import our AutoDiff helpers + include("autodiff_helpers.jl") + + # Try to load Enzyme - skip test if not available + (enzyme_loaded, enzyme_error) = try + using Enzyme + using DifferentiationInterface: AutoEnzyme + (true, nothing) + catch e + (false, e) end - test_backend(ex, AutoZygote(); allow_failure=false) - @static if enzyme_compatible - test_backend(ex, AutoEnzyme(); allow_failure=true) + if !enzyme_loaded + @warn "Skipping Enzyme tests because Enzyme.jl could not be loaded" exception = + enzyme_error + @test_skip "Enzyme.jl is not available" + else + rng = MersenneTwister(0) + + # Set up test data using our helper + _, dataset, init_params, _, true_val, true_d_params, true_d_constants = setup_parametric_test( + rng + ) + + # Create options with Enzyme backend + options = Options(; + unary_operators=[cos], binary_operators=[+, *, -], autodiff_backend=:Enzyme + ) + + ex = create_parametric_expression(init_params, options.operators) + + # Test with Enzyme + test_autodiff_backend( + ex, + dataset, + true_val, + true_d_constants, + true_d_params, + options, + 
AutoEnzyme(); + allow_failure=true, + ) end + # TODO: Test with batched dataset end diff --git a/test/test_feature_mutation.jl b/test/test_feature_mutation.jl new file mode 100644 index 000000000..51a78abae --- /dev/null +++ b/test/test_feature_mutation.jl @@ -0,0 +1,79 @@ +@testitem "Test feature mutation" tags = [:part1] begin + using SymbolicRegression + using DynamicExpressions: Node + using StableRNGs: StableRNG + + rng = StableRNG(0) + + @testset "Basic feature mutation" begin + # Single feature node + tree = Node(Float64; feature=1) + mutated = SymbolicRegression.MutationFunctionsModule.mutate_feature(tree, 3, rng) + @test mutated.feature != 1 # Should change + @test 1 <= mutated.feature <= 3 # In valid range + end + + @testset "Edge cases" begin + # Single feature - should not change when nfeatures=1 + tree = Node(Float64; feature=1) + mutated = SymbolicRegression.MutationFunctionsModule.mutate_feature(tree, 1, rng) + @test mutated.feature == 1 + + # Constant node - should be unchanged + tree = Node(Float64; val=1.0) + original_val = tree.val + mutated = SymbolicRegression.MutationFunctionsModule.mutate_feature(tree, 3, rng) + @test mutated.val == original_val # Should be unchanged + end + + @testset "Mutation weights" begin + # Test that mutate_feature is included in MutationWeights + weights = MutationWeights() + @test hasfield(typeof(weights), :mutate_feature) + @test weights.mutate_feature == 0.1 + end + + @testset "get_nfeatures_for_mutation API" begin + using DynamicExpressions: Expression + using SymbolicRegression.MutationFunctionsModule: get_nfeatures_for_mutation + + # Test default implementation + operators = OperatorEnum(; binary_operators=[+, *], unary_operators=[cos]) + ex = Expression(Node{Float64}(; feature=1); operators=operators) + + # Default implementation should return global nfeatures + @test get_nfeatures_for_mutation(ex, nothing, 5) == 5 + @test get_nfeatures_for_mutation(ex, nothing, 10) == 10 + end + + @testset "TemplateExpression 
get_nfeatures_for_mutation" begin + # Create a template structure with different feature counts per subexpression + struct_different_features = TemplateStructure{(:f, :g)}( + ((; f, g), (x1, x2, x3, x4)) -> f(x1, x2) + g(x1, x3, x4); + # f uses features 1, 2; g uses features 1, 3, 4 + ) + + options = Options(; + binary_operators=(+, *), + unary_operators=(sin,), + expression_spec=TemplateExpressionSpec(; structure=struct_different_features), + ) + operators = options.operators + variable_names = ["x1", "x2", "x3", "x4"] + + # Create composable expressions + f_expr = ComposableExpression(Node{Float64}(; feature=1); operators, variable_names) + g_expr = ComposableExpression(Node{Float64}(; feature=1); operators, variable_names) + + # Create template expression + template_ex = TemplateExpression( + (; f=f_expr, g=g_expr); structure=struct_different_features, operators=operators + ) + + using SymbolicRegression.MutationFunctionsModule: get_nfeatures_for_mutation + + # Test that each subexpression gets its specific feature count + @test get_nfeatures_for_mutation(template_ex, :f, 4) == 2 + @test get_nfeatures_for_mutation(template_ex, :g, 4) == 3 + end +end diff --git a/test/test_filtered_async.jl b/test/test_filtered_async.jl new file mode 100644 index 000000000..2805213c6 --- /dev/null +++ b/test/test_filtered_async.jl @@ -0,0 +1,56 @@ +@testitem "@filtered_async error forwarding tests" tags = [:part2] begin + using Distributed: Distributed + using SymbolicRegression.SearchUtilsModule: SearchUtilsModule as SUM + using Test: Test + using Suppressor: Suppressor + @gensym addprocs rmprocs procs t result future channel + + # n.b., we have to run in main as workers get initialized there, + # and complain about not being able to access their own closures. 
+ expr = quote + # Add a worker + $procs = $Distributed.addprocs(1) + try + $Distributed.@everywhere $procs Core.eval( + Core.Main, :(using Distributed: Distributed, @spawnat) + ) + + # Import Suppressor in Main for @suppress_err + $t = $SUM.@filtered_async 42 + $result = fetch($t) + $Test.@test $result == 42 + + $future = $Distributed.@spawnat $procs[1] 43 + $result = fetch($future) + $Test.@test $result == 43 + + # With no error + $future = $SUM.@sr_spawner( + 44, parallelism = :multiprocessing, worker_idx = $procs[1] + ) + $channel = Channel(1) + $t = $SUM.@filtered_async put!($channel, fetch($future)) + $Test.@test_nowarn fetch($t) + $Test.@test take!($channel) == 44 + + # With an error - suppress stderr but verify error forwarding works + $Suppressor.@suppress_err begin + $future = $SUM.@sr_spawner( + throw(ArgumentError("test multiprocessing error")), + parallelism = :multiprocessing, + worker_idx = $procs[1] + ) + $t = $SUM.@filtered_async fetch($future) + $Test.@test_throws TaskFailedException fetch($t) + end + + # Test ProcessExitedException filtering (should be filtered out by @filtered_async) + $t = $SUM.@filtered_async throw($Distributed.ProcessExitedException($procs[1])) + $Test.@test_nowarn fetch($t) + + finally + $Distributed.rmprocs($procs) + end + end + Core.eval(Core.Main, expr) +end diff --git a/test/test_guesses.jl b/test/test_guesses.jl new file mode 100644 index 000000000..2081de765 --- /dev/null +++ b/test/test_guesses.jl @@ -0,0 +1,538 @@ +@testitem "Single output, single guess" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression: calculate_pareto_frontier + using Test + + X = randn(2, 50) + y = @. 
2.0 * X[1, :]^2 + 3.0 * X[2, :] + 0.5 + + options = Options(; + binary_operators=(+, *), unary_operators=(), verbosity=0, progress=false + ) + + # See if a good guess helps the search + good_guess = "2.0*x1*x1 + 3.0*x2 + 0.5" + hof = equation_search(X, y; niterations=0, options, guesses=[good_guess]) + dominating = calculate_pareto_frontier(hof) + + @test any(m -> m.loss < 1e-10, dominating) + + # We also test that this test correctly measures the behavior + bad_guess = "1.0*x1 + 1.0*x2 + 0.0" + hof = equation_search(X, y; niterations=0, options, guesses=[bad_guess]) + dominating = calculate_pareto_frontier(hof) + + @test !any(m -> m.loss < 1e-10, dominating) +end + +@testitem "parse_guesses with NamedTuple" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression: parse_guesses, Dataset, PopMember + using Test + + # Create test data + X = Float64[1.0 2.0; 3.0 4.0] + y = Float64[5.0, 6.0] + dataset = Dataset(X, y) + + # Create template options + operators = OperatorEnum(; binary_operators=[+, -, *], unary_operators=[]) + template = @template_spec(expressions = (f, g)) do x1, x2 + f(x1, x2) + g(x1, x2) + end + options = Options(; operators=operators, expression_spec=template) + + # Test NamedTuple guess with #N placeholder syntax + namedtuple_guess = (; f="2.0 * #1", g="1.5 * #2") + + # Test parse_guesses function directly + parsed_members = parse_guesses( + PopMember{Float64,Float64}, [namedtuple_guess], [dataset], options + ) + + # Should return a vector of vectors (one per output dataset) + @test length(parsed_members) == 1 + @test length(parsed_members[1]) == 1 + + # Check that the parsed member is correct type + member = parsed_members[1][1] + @test member isa PopMember + @test member.tree isa TemplateExpression + + # Check that the template expression has the right structure + @test haskey(member.tree.trees, :f) + @test haskey(member.tree.trees, :g) + @test member.tree.trees.f isa ComposableExpression + @test member.tree.trees.g isa 
ComposableExpression + + # Test multiple NamedTuple guesses + multiple_guesses = [(; f="#1", g="#2"), (; f="2.0 * #1", g="1.5 * #2")] + + parsed_multiple = parse_guesses( + PopMember{Float64,Float64}, multiple_guesses, [dataset], options + ) + + @test length(parsed_multiple) == 1 + @test length(parsed_multiple[1]) == 2 + @test all(m -> m.tree isa TemplateExpression, parsed_multiple[1]) +end + +@testitem "parse_guesses with NamedTuple and parameters" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression: parse_guesses, Dataset, PopMember + using Test + + # Create test data + X = Float64[1.0 2.0; 3.0 4.0] + y = Float64[5.0, 6.0] + dataset = Dataset(X, y) + + # Create template with parameters + operators = OperatorEnum(; binary_operators=[+, -, *], unary_operators=[]) + template = @template_spec(expressions = (f,), parameters = (p=2,)) do x1, x2 + f(x1, x2) + p[1] * x1 + p[2] + end + options = Options(; operators=operators, expression_spec=template) + + # Test NamedTuple guess - should auto-initialize parameters + namedtuple_guess = (; f="#1 * #2") + + parsed_members = parse_guesses( + PopMember{Float64,Float64}, [namedtuple_guess], [dataset], options + ) + + member = parsed_members[1][1] + @test member.tree isa TemplateExpression + @test haskey(get_metadata(member.tree).parameters, :p) + @test length(get_metadata(member.tree).parameters.p._data) == 2 +end + +@testitem "NamedTuple guesses with different variable names" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression: calculate_pareto_frontier + using Test + + X = randn(3, 20) + y = @. 
X[1, :] + 2.0 * X[2, :] - X[3, :] + + operators = OperatorEnum(; binary_operators=[+, -, *], unary_operators=[]) + variable_names = ["input1", "input2", "input3"] + + # Create template using @template_spec macro + template = @template_spec(expressions = (term1, term2)) do input1, input2, input3 + term1(input1, input2, input3) + term2(input1, input2, input3) + end + + options = Options(; + operators=operators, expression_spec=template, verbosity=0, progress=false + ) + + # Test NamedTuple guess with custom variable names using #N placeholder syntax + guess_with_custom_names = (; term1="#1 + 2.0 * #2", term2="-1.0 * #3") + hof = equation_search( + X, + y; + niterations=0, + options, + guesses=[guess_with_custom_names], + variable_names=variable_names, + ) + dominating = calculate_pareto_frontier(hof) + + @test any(m -> m.loss < 1e-8, dominating) # Should find exact solution + @test any(m -> m.tree isa TemplateExpression, dominating) +end + +@testitem "Float32 dataset with Float64 guess literals" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression: parse_guesses, Dataset, PopMember + using Test + + # Create Float32 dataset + X = Float32[1.0 2.0; 3.0 4.0] + y = Float32[5.0, 6.0] + dataset = Dataset(X, y) + + options = Options(; + binary_operators=[+, -, *, /], verbosity=0, progress=false, deterministic=true + ) + + guess_with_float64_literals = "4.561253 - ((x1 - x2) * 0.18459733)" + + parsed_members = parse_guesses( + PopMember{Float32,Float32}, [guess_with_float64_literals], [dataset], options + ) + @test length(parsed_members) == 1 + @test length(parsed_members[1]) == 1 + @test parsed_members[1][1] isa PopMember{Float32,Float32} + + # Test that Float32 literals work fine + guess_with_float32_literals = "4.561253f0 - ((x1 - x2) * 0.18459733f0)" + parsed_members = parse_guesses( + PopMember{Float32,Float32}, [guess_with_float32_literals], [dataset], options + ) + @test length(parsed_members) == 1 + @test length(parsed_members[1]) == 1 + @test 
parsed_members[1][1] isa PopMember{Float32,Float32} +end + +@testitem "Custom operators in string guesses" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression: parse_guesses, Dataset, PopMember, calculate_pareto_frontier + using Test + + # Define custom operators + pythag_pos(x, y) = sqrt(x^2 + y^2) + pythag_neg(x, y) = (d = x^2 - y^2) < 0 ? typeof(x)(NaN) : typeof(x)(sqrt(d)) + custom_sin(x) = sin(x) + 0.1 + + # Test with binary custom operators + X = Float64[1.0 2.0 3.0; 4.0 5.0 6.0] + y = Float64[7.0, 8.0, 9.0] + dataset = Dataset(X, y) + + options = Options(; + binary_operators=[+, -, *, /, pythag_pos, pythag_neg], + unary_operators=[sin, cos, custom_sin], + verbosity=0, + progress=false, + ) + + # Test that custom operators work in string guesses + custom_guess = "pythag_pos(x1, x2) + custom_sin(x1)" + parsed_members = parse_guesses( + PopMember{Float64,Float64}, [custom_guess], [dataset], options + ) + + @test length(parsed_members) == 1 + @test length(parsed_members[1]) == 1 + @test parsed_members[1][1] isa PopMember{Float64,Float64} + + # Test with complex expression like the original failing case + complex_guess = "pythag_pos(x1, 4.51352 - ((x2 - x1) * 0.07425507))" + parsed_complex = parse_guesses( + PopMember{Float64,Float64}, [complex_guess], [dataset], options + ) + + @test length(parsed_complex) == 1 + @test length(parsed_complex[1]) == 1 + @test parsed_complex[1][1] isa PopMember{Float64,Float64} + + # Test multiple custom operator guesses + multiple_custom_guesses = [ + "pythag_pos(x1, x2)", "pythag_neg(x1, x2) + 0.5", "custom_sin(x1) * x2" + ] + parsed_multiple = parse_guesses( + PopMember{Float64,Float64}, multiple_custom_guesses, [dataset], options + ) + + @test length(parsed_multiple) == 1 + @test length(parsed_multiple[1]) == 3 + @test all(m -> m isa PopMember{Float64,Float64}, parsed_multiple[1]) +end + +@testitem "Custom operators in equation_search guesses" tags = [:part1] begin + using SymbolicRegression + using 
SymbolicRegression: calculate_pareto_frontier + using Test + + # Define custom operators + pythag_pos(x, y) = sqrt(x^2 + y^2) + pythag_neg(x, y) = (d = x^2 - y^2) < 0 ? typeof(x)(NaN) : typeof(x)(sqrt(d)) + + # Create synthetic data where custom operator is the true function + X = randn(2, 30) + y = pythag_pos.(X[1, :], X[2, :]) .+ 0.01 .* randn(30) # Add small noise + + options = Options(; + binary_operators=[+, -, *, /, pythag_pos, pythag_neg], + unary_operators=[sin, cos], + verbosity=0, + progress=false, + ) + + # Test that custom operator guess works in equation_search + custom_guess = "pythag_pos(x1, x2)" + hof = equation_search(X, y; niterations=0, options, guesses=[custom_guess]) + dominating = calculate_pareto_frontier(hof) + + # Should find a good solution since we gave it the exact function + @test any(m -> m.loss < 1e-2, dominating) + + # Test more complex custom operator expression + complex_guess = "pythag_pos(x1, x2) + 0.0" + hof_complex = equation_search(X, y; niterations=0, options, guesses=[complex_guess]) + dominating_complex = calculate_pareto_frontier(hof_complex) + + @test any(m -> m.loss < 1e-2, dominating_complex) +end + +@testitem "Custom operators error handling in guesses" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression: parse_guesses, Dataset, PopMember + using Test + + # Define custom operator + pythag_pos(x, y) = sqrt(x^2 + y^2) + + X = Float64[1.0 2.0; 3.0 4.0] + y = Float64[5.0, 6.0] + dataset = Dataset(X, y) + + options = Options(; + binary_operators=[+, -, *, /, pythag_pos], verbosity=0, progress=false + ) + + # Test wrong arity error + @test_throws ArgumentError parse_guesses( + PopMember{Float64,Float64}, ["pythag_pos(x1)"], [dataset], options + ) + + # Test non-existent operator error + @test_throws ArgumentError parse_guesses( + PopMember{Float64,Float64}, ["nonexistent_op(x1, x2)"], [dataset], options + ) +end + +@testitem "Custom operators with NamedTuple guesses" tags = [:part1] begin + using 
SymbolicRegression + using SymbolicRegression: parse_guesses, Dataset, PopMember + using Test + + # Define custom operators + pythag_pos(x, y) = sqrt(x^2 + y^2) + custom_mul(x, y) = x * y * 1.1 + + X = Float64[1.0 2.0; 3.0 4.0] + y = Float64[5.0, 6.0] + dataset = Dataset(X, y) + + # Create template options with custom operators + operators = OperatorEnum(; + binary_operators=[+, -, *, pythag_pos, custom_mul], unary_operators=[] + ) + template = @template_spec(expressions = (f, g)) do x1, x2 + f(x1, x2) + g(x1, x2) + end + options = Options(; operators=operators, expression_spec=template) + + # Test NamedTuple guess with custom operators using #N placeholder syntax + namedtuple_guess = (; f="pythag_pos(#1, #2)", g="custom_mul(#1, #2)") + + parsed_members = parse_guesses( + PopMember{Float64,Float64}, [namedtuple_guess], [dataset], options + ) + + @test length(parsed_members) == 1 + @test length(parsed_members[1]) == 1 + + member = parsed_members[1][1] + @test member isa PopMember + @test member.tree isa TemplateExpression + @test haskey(member.tree.trees, :f) + @test haskey(member.tree.trees, :g) +end + +@testitem "Smoke test migration with multiple outputs and templates" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression: calculate_pareto_frontier + using Test + + # Multi-output data + X = randn(2, 20) + y1 = @. 2.0 * X[1, :] + X[2, :] + y2 = @. 
X[1, :] - X[2, :] + Y = [y1 y2]' + + # Template expressions + operators = OperatorEnum(; binary_operators=[+, -, *], unary_operators=[]) + template = @template_spec(expressions = (f,)) do x1, x2 + f(x1, x2) + end + options = Options(; + operators=operators, + expression_spec=template, + fraction_replaced_guesses=0.5, + verbosity=0, + progress=false, + ) + guesses = [[(; f="1.9 * #1 + #2")], [(; f="#1 - #2")]] + hof = equation_search(X, Y; niterations=1, options, guesses) + + @test all(h -> any(m -> m.loss < 0.01, calculate_pareto_frontier(h)), hof) +end + +@testitem "parse_guesses with mix of strings and expression objects" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression: parse_guesses, Dataset, PopMember + using Test + using DynamicExpressions: @parse_expression + + X = Float64[1.0 2.0; 3.0 4.0] + y = Float64[5.0, 6.0] + dataset = Dataset(X, y) + options = Options(; binary_operators=[+, -, *, /], unary_operators=[sin, cos]) + expr1 = Expression( + Node{Float64}(; feature=1); operators=nothing, variable_names=nothing + ) + expr2 = "x1 - x2" + expr3 = Expression(Node{Float64}(; val=1.0); operators=nothing, variable_names=nothing) + mixed_guesses = [expr1, expr2, expr3] + + # Test parse_guesses with mixed input types + parsed_members = parse_guesses( + PopMember{Float64,Float64}, mixed_guesses, [dataset], options + ) + + # Should return a vector of vectors (one per output dataset) + @test length(parsed_members) == 1 + @test length(parsed_members[1]) == 3 + + # Check that all parsed members are correct type + for member in parsed_members[1] + @test member isa PopMember{Float64,Float64} + @test member.tree !== nothing + @test member.tree isa Expression + end + + # No constant optimization happens yet + @test parsed_members[1][1].tree == expr1 + @test string_tree(with_metadata(parsed_members[1][2].tree; options.operators)) == + "x1 - x2" + + # However, this one does get optimized + @test parsed_members[1][3].tree != expr3 + @test 
parsed_members[1][3].tree.tree.val != 1.0 +end + +@testitem "maxsize warning" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression: parse_guesses, Dataset, PopMember + using Test + using Logging + + X = Float64[1.0 2.0; 3.0 4.0] + y = Float64[5.0, 6.0] + dataset = Dataset(X, y) + options = Options(; binary_operators=[+, -, *, /], maxsize=7) + + # Test complex guess triggers warning + io = IOBuffer() + with_logger(Logging.SimpleLogger(io, Logging.Warn)) do + parse_guesses( + PopMember{Float64,Float64}, + ["x1 * x2 + x1 * x2 + x1 * x2 + x1 * x2 + x1 * x2"], + [dataset], + options, + ) + end + log_output = String(take!(io)) + @test contains(log_output, "complexity") && contains(log_output, "maxsize") + + # Test simple guess doesn't trigger warning + io = IOBuffer() + with_logger(Logging.SimpleLogger(io, Logging.Warn)) do + parse_guesses(PopMember{Float64,Float64}, ["x1 + x2"], [dataset], options) + end + @test !contains(String(take!(io)), "maxsize") +end + +@testitem "Vector of vectors input for single output" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression: parse_guesses, Dataset, PopMember + using Test + + X = Float64[1.0 2.0; 3.0 4.0] + y = Float64[5.0, 6.0] + dataset = Dataset(X, y) + options = Options(; binary_operators=[+, -]) + + # Single output (nout=1) with vector-of-vectors format + guesses_vector_of_vectors = [["x1 + x2", "x1 - x2"]] + parsed_members = parse_guesses( + PopMember{Float64,Float64}, guesses_vector_of_vectors, [dataset], options + ) + @test length(parsed_members) == 1 # One output + @test length(parsed_members[1]) == 2 # Two guesses for that output +end + +@testitem "Multiple outputs guesses format validation" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression: parse_guesses, Dataset, PopMember + using Test + + datasets = [Dataset(randn(2, 10), randn(10)) for _ in 1:2] + options = Options() + + @test_throws( + ArgumentError("`guesses` must be a vector of vectors when 
`nout > 1`"), + parse_guesses( + PopMember{Float64,Float64}, ["x1 + x2", "x1 - x2"], datasets, options + ) + ) +end + +@testitem "File saving with niterations=0" tags = [:part1] begin + using SymbolicRegression + using Test + using Random: MersenneTwister + + # Create test data + rng = MersenneTwister(0) + X = randn(rng, 2, 30) + y = @. 2.0 * X[1, :]^2 + 3.0 * X[2, :] + 0.5 + + tmpdir = mktempdir() + options = Options(; + binary_operators=(+, *), + unary_operators=(), + verbosity=0, + progress=false, + save_to_file=true, + seed=0, + deterministic=true, + output_directory=tmpdir, + ) + + # Test that files are saved even when niterations=0, including guesses + good_guess = "2.0*x1*x1 + 3.0*x2 + 0.5" + hof = equation_search( + X, y; niterations=0, options=options, parallelism=:serial, guesses=[good_guess] + ) + + output_files = [] + for (root, dirs, files) in walkdir(tmpdir) + for file in files + if endswith(file, ".csv") && contains(file, "hall_of_fame") + push!(output_files, joinpath(root, file)) + end + end + end + @test length(output_files) == 1 + + expected_file = only(output_files) + content = read(expected_file, String) + @test !isempty(content) + @test contains(content, "Complexity") + @test contains(content, "Loss") + @test contains(content, "x1") + + lines = split(content, '\n') + equation_lines = filter( + line -> !startswith(line, "Complexity") && !isempty(strip(line)), lines + ) + @test length(equation_lines) > 0 + + # Check that one equation matches our guess: "2.0*x1*x1 + 3.0*x2 + 0.5" + @test any(equation_lines) do line + parts = split(line, ',') + if length(parts) >= 3 + equation_part = strip(parts[end]) + equation_part = strip(equation_part, '"') + if equation_part == "(((2.0 * x1) * x1) + (3.0 * x2)) + 0.5" + return true + end + end + return false + end +end diff --git a/test/test_loss_function_expression_multiprocessing.jl b/test/test_loss_function_expression_multiprocessing.jl index f349ad961..5843475b6 100644 --- 
a/test/test_loss_function_expression_multiprocessing.jl +++ b/test/test_loss_function_expression_multiprocessing.jl @@ -1,11 +1,13 @@ using SymbolicRegression +using SymbolicRegression: compute_complexity using Test defs = quote - using SymbolicRegression - early_stop(loss, c) = ((loss <= 1e-10) && (c <= 4)) - function my_loss_expression(ex::Expression, dataset::Dataset, options::Options) + expression_spec = @template_spec(expressions = (f,),) do x1, x2, x3, x4, x5 + f(x1, x2, x3, x4, x5) + end + function my_loss_expression(ex::AbstractExpression, dataset::Dataset, options::Options) prediction, complete = eval_tree_array(ex, dataset.X, options) if !complete return Inf @@ -16,8 +18,9 @@ end # This is needed as workers are initialized in `Core.Main`! if (@__MODULE__) != Core.Main + Core.eval(Core.Main, :(using SymbolicRegression)) Core.eval(Core.Main, defs) - eval(:(using Main: early_stop, my_loss_expression)) + eval(:(using Main: early_stop, expression_spec, my_loss_expression)) else eval(defs) end @@ -29,6 +32,7 @@ options = SymbolicRegression.Options(; binary_operators=[*, +], unary_operators=[cos], early_stop_condition=early_stop, + expression_spec=expression_spec, loss_function_expression=my_loss_expression, batching=true, batch_size=32, @@ -45,6 +49,6 @@ hof = equation_search( ) @test any( - early_stop(member.loss, length(get_tree(member.tree))) for + early_stop(member.loss, compute_complexity(member.tree, options)) for member in hof.members[hof.exists] ) diff --git a/test/test_loss_scale.jl b/test/test_loss_scale.jl new file mode 100644 index 000000000..5d042ae5c --- /dev/null +++ b/test/test_loss_scale.jl @@ -0,0 +1,172 @@ +@testitem "loss_scale parameter validation" tags = [:part2] begin + using SymbolicRegression + + # Test Options constructor assertion + @test_throws AssertionError Options(loss_scale=:invalid) + + # Test we can create options with valid values + options_log = Options(; loss_scale=:log) + options_linear = Options(; loss_scale=:linear) + + 
@test options_log.loss_scale == :log + @test options_linear.loss_scale == :linear +end + +@testitem "loss_scale score computation" tags = [:part2] begin + using SymbolicRegression.HallOfFameModule: + compute_direct_score, compute_zero_centered_score + + @test compute_direct_score(0.5, 1.0, 1.0) ≈ 0.5 + @test compute_direct_score(1.2, 1.0, 1.0) ≈ 0.0 + + @test compute_zero_centered_score(0.5, 1.0, 1.0) ≈ log(2) atol = 1e-5 + @test compute_zero_centered_score(0.1, 1.0, 1.0) ≈ log(10) atol = 1e-5 + @test compute_zero_centered_score(2.0, 1.0, 1.0) ≈ 0.0 +end + +@testitem "loss_scale in choose_best" tags = [:part2] begin + using SymbolicRegression + using SymbolicRegression.MLJInterfaceModule: choose_best + + # Test data + trees = [1, 2, 3, 4] # Placeholder, not used in function + losses = [0.5, 0.3, 0.4, 0.2] + scores = [0.1, 0.8, 0.5, 0.3] + complexities = [1, 2, 3, 4] + + # With loss_scale=:log (default behavior) + options_log = Options(; loss_scale=:log) + best_idx_log = choose_best(; + trees=trees, + losses=losses, + scores=scores, + complexities=complexities, + options=options_log, + ) + @test best_idx_log == 2 # Best score (0.8) among those with loss <= 1.5*min_loss + + # With loss_scale=:linear + options_linear = Options(; loss_scale=:linear) + best_idx_linear = choose_best(; + trees=trees, + losses=losses, + scores=scores, + complexities=complexities, + options=options_linear, + ) + @test best_idx_linear == 4 # Simply picks minimum loss (0.2) +end + +@testitem "loss_scale in pareto_volume" tags = [:part2] begin + using SymbolicRegression.LoggingModule: pareto_volume + + # Test data + test_losses = [0.5, 0.3, 0.2] + test_complexities = [1, 3, 5] + + # Both should produce valid volumes + @test pareto_volume(test_losses, test_complexities, 10, false) > 0 # log mode + @test pareto_volume(test_losses, test_complexities, 10, true) > 0 # linear mode + + # Test negative losses work with linear mode but not log mode + neg_losses = [0.1, -0.1, -0.5] + @test 
pareto_volume(neg_losses, test_complexities, 10, true) > 0 # works with linear +end + +@testitem "loss_scale in MLJ interface" tags = [:part2] begin + using SymbolicRegression + using SymbolicRegression: get_options + + # Test MLJ interface supports loss_scale parameter + model_log = SRRegressor(; loss_scale=:log) + model_linear = SRRegressor(; loss_scale=:linear) + @test get_options(model_log).loss_scale == :log + @test get_options(model_linear).loss_scale == :linear + + # Test with multitarget regressor too + model_mt_log = MultitargetSRRegressor(; loss_scale=:log) + model_mt_linear = MultitargetSRRegressor(; loss_scale=:linear) + @test get_options(model_mt_log).loss_scale == :log + @test get_options(model_mt_linear).loss_scale == :linear +end + +@testitem "loss_scale error handling" tags = [:part2] begin + using SymbolicRegression + using SymbolicRegression.CoreModule: Dataset + using SymbolicRegression.HallOfFameModule: format_hall_of_fame + using SymbolicRegression.PopMemberModule: PopMember + using DynamicExpressions: Node + + # Create test dataset + X = [1.0 2.0] + y = [3.0] + dataset = Dataset(X, y; variable_names=["x1", "x2"]) + + # Create options with different loss scales + options_log = Options(; loss_scale=:log, binary_operators=[+, -, *], unary_operators=[]) + options_linear = Options(; loss_scale=:linear) + + # Create a simple test case with negative loss + hof = HallOfFame(options_log, dataset) + hof.members[1].tree = Expression( + Node{Float64}(; feature=1); operators=nothing, variable_names=nothing + ) + hof.members[1].loss = -1.0 + hof.exists[1] = true + + # With :log scale, should throw a DomainError with a helpful message + err = try + format_hall_of_fame(hof, options_log) + nothing + catch e + e + end + @test err isa DomainError + @test occursin("must be non-negative", err.msg) + @test occursin("set the `loss_scale` to linear", err.msg) + + # With :linear scale, should work fine with negative losses + result = format_hall_of_fame(hof, 
options_linear) + @test result.losses[1] == -1.0f0 + @test result.scores[1] >= 0.0 +end + +@testitem "string_dominating_pareto_curve header display" tags = [:part2] begin + using SymbolicRegression + using SymbolicRegression.HallOfFameModule: HallOfFame, string_dominating_pareto_curve + using SymbolicRegression.CoreModule: Dataset + using DynamicExpressions: Node, Expression + + # Create simple test dataset + X = [1.0 2.0] + y = [3.0] + dataset = Dataset(X, y; variable_names=["x1", "x2"]) + + # Create options with different loss scales + options_log = Options(; loss_scale=:log, binary_operators=[+, -], unary_operators=[]) + options_linear = Options(; + loss_scale=:linear, binary_operators=[+, -], unary_operators=[] + ) + + # Create a minimal Hall of Fame with one element + hof = HallOfFame(options_log, dataset) + hof.members[1].tree = Expression( + Node{Float64}(; feature=1); operators=nothing, variable_names=nothing + ) + hof.members[1].loss = 0.5 + hof.exists[1] = true + + # Test with log scale (should show Score column) + output_log = string_dominating_pareto_curve(hof, dataset, options_log) + @test occursin("Complexity", output_log) + @test occursin("Loss", output_log) + @test occursin("Score", output_log) + @test occursin("Equation", output_log) + + # Test with linear scale (should NOT show Score column) + output_linear = string_dominating_pareto_curve(hof, dataset, options_linear) + @test occursin("Complexity", output_linear) + @test occursin("Loss", output_linear) + @test !occursin("Score", output_linear) + @test occursin("Equation", output_linear) +end diff --git a/test/test_mixed_utils.jl b/test/test_mixed_utils.jl index bad2b2d73..7bc94eb40 100644 --- a/test/test_mixed_utils.jl +++ b/test/test_mixed_utils.jl @@ -124,7 +124,7 @@ function test_mixed(i, batching::Bool, weighted::Bool, parallelism) @test length(dom) > 0 best = dom[end] # Assert we created the correct type of trees: - @test node_type(typeof(best.tree)) == Node{T} + @test 
node_type(typeof(best.tree)) <: Node{T} # Test the cost @test best.loss < maximum_residual diff --git a/test/test_mlj.jl b/test/test_mlj.jl index faadf5a6e..8e9342e0a 100644 --- a/test/test_mlj.jl +++ b/test/test_mlj.jl @@ -107,7 +107,7 @@ end rng = MersenneTwister(0) X = (b1=randn(rng, 32), b2=randn(rng, 32)) - Y = (c1=X.b1 .* X.b2, c2=X.b1 .+ X.b2) + Y = (c1=(X.b1 .* X.b2), c2=(X.b1 .+ X.b2)) w = ones(32) model = MultitargetSRRegressor(; niterations=10, stop_kws...) mach = machine(model, X, Y, w) @@ -257,3 +257,36 @@ end end @test occursin("Evaluation failed either due to", msg) end + +@testitem "MLJ options caching fix" tags = [:part3] begin + using SymbolicRegression + using SymbolicRegression: WarmStartIncompatibleError + using MLJBase + using Random: MersenneTwister + using Suppressor + + include("test_params.jl") + + # Test that parameter changes are respected and incompatible changes throw errors + rng = MersenneTwister(0) + X = (x1=randn(rng, 50), x2=randn(rng, 50)) + y = @. 2.0 * X.x1 + 3.0 * X.x2 + + model = SRRegressor(; + binary_operators=[+, -, *], niterations=2, tournament_selection_n=10, populations=2 + ) + + mach = machine(model, X, y) + @suppress fit!(mach, verbosity=0) + + # Test compatible parameter change + model.tournament_selection_n = 20 + @suppress fit!(mach, verbosity=0) + @test mach.fitresult.options.tournament_selection_n == 20 # Should be updated + + # Test incompatible parameter change throws error with correct message + model.populations = 4 + err = @test_throws WarmStartIncompatibleError @suppress fit!(mach, verbosity=0) + @test :populations ∈ err.value.fields + @test occursin("force=true", sprint(showerror, err.value)) +end diff --git a/test/test_mooncake_autodiff.jl b/test/test_mooncake_autodiff.jl new file mode 100644 index 000000000..e7789fe76 --- /dev/null +++ b/test/test_mooncake_autodiff.jl @@ -0,0 +1,141 @@ +@testitem "Expression constant optimization with Mooncake" tags = [:mooncake, :part1] begin + using SymbolicRegression 
+ using SymbolicRegression.ConstantOptimizationModule: optimize_constants + using DynamicExpressions: get_scalar_constants + using StableRNGs: StableRNG + using Mooncake + using DifferentiationInterface: AutoMooncake + + backend = AutoMooncake(; config=nothing) + default_args = (; + binary_operators=(+, -, *), + unary_operators=(sin,), + autodiff_backend=backend, + should_optimize_constants=true, + optimizer_nrestarts=3, + optimizer_probability=1.0, + optimizer_iterations=1000, + ) + + @testset "Expression" begin + options = Options(; default_args...) + + # Create expression with constants to optimize + x1 = Expression(Node(Float64; feature=1); options.operators) + x2 = Expression(Node(Float64; feature=2); options.operators) + # Start with slightly wrong constants + tree = 2.0 * x1 + sin(2.5 * x2 + 0.9) - 1.4 + + rng = StableRNG(0) + + # Generate test data + X = rand(rng, 2, 32) .* 10 + y = @. 2.1 * X[1, :] + sin(2.6 * X[2, :] + 0.8) - 1.5 + dataset = Dataset(X, y) + + member = PopMember(dataset, tree, options; deterministic=false) + initial_loss = member.loss + + # Run constant optimization + optimized_member, num_evals = optimize_constants( + dataset, copy(member), options; rng=rng + ) + + @test optimized_member.loss < 1e-10 + @test num_evals > 0 + + constants, _ = get_scalar_constants(optimized_member.tree) + @test length(constants) == 4 + @test all(isfinite, constants) + end + + @testset "TemplateExpression" begin + spec = @template_spec(expressions = (f, g)) do x, y, z + f(x, y) + 2.0 * g(3.0 * z) + end + options = Options(; default_args..., expression_spec=spec) + + arg1 = ComposableExpression(Node{Float64}(; feature=1); options.operators) + + true_f = 2.0 * arg1 - 1.5 + true_g = 0.9 * sin(arg1 * 0.2) + + init_f = 1.9 * arg1 - 1.4 + init_g = 0.8 * sin(arg1 * 0.25) + + true_tree = TemplateExpression( + (; f=true_f, g=true_g); spec.structure, options.operators + ) + init_tree = TemplateExpression( + (; f=init_f, g=init_g); spec.structure, options.operators + ) 
+ + rng = StableRNG(1) + dataset = let + X = rand(rng, 3, 32) .* 10 + y = true_tree(X) + Dataset(X, y) + end + + @test length(get_scalar_constants(true_tree)[1]) == 4 + + member = PopMember(dataset, init_tree, options; deterministic=false) + optimized_member, num_evals = optimize_constants( + dataset, copy(member), options; rng=rng + ) + + @test optimized_member.loss < 1e-10 + @test num_evals > 0 + + constants, _ = get_scalar_constants(optimized_member.tree) + @test length(constants) == 4 + @test all(isfinite, constants) + end + + @testset "TemplateExpression with parameters" begin + spec = @template_spec(expressions = (f, g), parameters = (p=1,),) do x, y, z, w + f(x, y) + g(3.0 * z) + p[1] * w + end + options = Options(; default_args..., expression_spec=spec) + + arg1 = ComposableExpression(Node{Float64}(; feature=1); options.operators) + + true_f = 2.0 * arg1 - 1.5 + true_g = 0.9 * sin(arg1 * 0.2) + + init_f = 1.9 * arg1 - 1.4 + init_g = 0.8 * sin(arg1 * 0.25) + + true_tree = TemplateExpression( + (; f=true_f, g=true_g); + spec.structure, + options.operators, + parameters=(; p=[0.9]), + ) + init_tree = TemplateExpression( + (; f=init_f, g=init_g); + spec.structure, + options.operators, + parameters=(; p=[0.5]), + ) + + rng = StableRNG(0) + dataset = let + X = rand(rng, 4, 32) .* 10 + y = true_tree(X) + Dataset(X, y) + end + + @test length(get_scalar_constants(true_tree)[1]) == 5 + + member = PopMember(dataset, init_tree, options; deterministic=false) + optimized_member, num_evals = optimize_constants( + dataset, copy(member), options; rng=rng + ) + + @test optimized_member.loss < 1e-10 + @test num_evals > 0 + + @test get_metadata(optimized_member.tree).parameters.p ≈ [0.9] + end +end diff --git a/test/test_operators.jl b/test/test_operators.jl index e7f0c5b07..4af03497e 100644 --- a/test/test_operators.jl +++ b/test/test_operators.jl @@ -134,13 +134,13 @@ end @test_nowarn SymbolicRegression.assert_operators_well_defined(T, options) end - using 
SymbolicRegression.CoreModule.OptionsModule: inverse_binopmap + using SymbolicRegression.CoreModule.OptionsModule: inverse_opmap # Test inverse mapping for comparison operators - @test inverse_binopmap(greater) == (>) - @test inverse_binopmap(less) == (<) - @test inverse_binopmap(greater_equal) == (>=) - @test inverse_binopmap(less_equal) == (<=) + @test inverse_opmap(greater) == (>) + @test inverse_opmap(less) == (<) + @test inverse_opmap(greater_equal) == (>=) + @test inverse_opmap(less_equal) == (<=) end @testitem "Built-in operators pass validation for complex numbers" tags = [:part2] begin @@ -278,6 +278,10 @@ end @test iszero(deriv_invalid) end + # On ForwardDiff v1+, this becomes `!isfinite(x)`, + # but on earlier versions, invalid inputs returned `0.0`. + zero_or_nonfinite(x) = iszero(x) || !isfinite(x) + # Test safe_pow separately since it's binary for x in [0.5, 2.0], y in [2.0, 0.5] # Test valid derivatives @@ -287,9 +291,35 @@ end @test !isnan(deriv_y) @test !iszero(deriv_x) # Should be non-zero for our test points - # Test invalid cases return 0.0 derivatives - @test iszero(ForwardDiff.derivative(x -> safe_pow(x, -1.0), 0.0)) # 0^(-1) - @test iszero(ForwardDiff.derivative(x -> safe_pow(-x, 0.5), 1.0)) # (-x)^0.5 - @test iszero(ForwardDiff.derivative(x -> safe_pow(x, -0.5), 0.0)) # 0^(-0.5) + # Test invalid cases return non-finite or zero derivatives + @test zero_or_nonfinite(ForwardDiff.derivative(x -> safe_pow(x, -1.0), 0.0)) # 0^(-1) + @test iszero(ForwardDiff.derivative(x -> safe_pow(-x, 0.5), 1.0)) + @test zero_or_nonfinite(ForwardDiff.derivative(x -> safe_pow(x, -0.5), 0.0)) # 0^(-0.5) end end + +@testitem "user_provided_operators applies safe operator mappings" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression: safe_log, safe_pow, safe_sqrt + using DynamicExpressions: OperatorEnum + + # Test that when user_provided_operators=true, operators get mapped through opmap + # This was a bug where user-provided operators weren't 
being mapped to safe versions + + # Create operators with regular (potentially unsafe) functions + operators = OperatorEnum( + 1 => (log, sqrt), # Should become safe_log, safe_sqrt + 2 => (+, -, (^)), # ^ should become safe_pow + ) + + # Create options with user_provided_operators=true + options = Options(; operators) + + # Verify that the operators were mapped to their safe versions + @test options.operators.ops[1] == (safe_log, safe_sqrt) + @test options.operators.ops[2] == (+, -, safe_pow) + + # Also test accessing via convenience properties + @test options.operators.unaops == (safe_log, safe_sqrt) + @test options.operators.binops == (+, -, safe_pow) +end diff --git a/test/test_options.jl b/test/test_options.jl index 9c7bc0d99..6a11fd5f7 100644 --- a/test/test_options.jl +++ b/test/test_options.jl @@ -1,15 +1,115 @@ -using SymbolicRegression -using Optim: Optim +@testitem "Test options" tags = [:part1] begin + using SymbolicRegression + using Optim: Optim -# testing types -op = Options(; optimizer_options=(iterations=16, f_calls_limit=100, x_tol=1e-16)); -@test isa(op.optimizer_options, Optim.Options) + # testing types + op = Options(; optimizer_options=(iterations=16, f_calls_limit=100, x_abstol=1e-16)) + @test isa(op.optimizer_options, Optim.Options) -op = Options(; - optimizer_options=Dict(:iterations => 32, :g_calls_limit => 50, :f_tol => 1e-16) -); -@test isa(op.optimizer_options, Optim.Options) + op = Options(; + optimizer_options=Dict(:iterations => 32, :g_calls_limit => 50, :f_reltol => 1e-16) + ) + @test isa(op.optimizer_options, Optim.Options) -optim_op = Optim.Options(; iterations=16) -op = Options(; optimizer_options=optim_op); -@test isa(op.optimizer_options, Optim.Options) + optim_op = Optim.Options(; iterations=16) + op = Options(; optimizer_options=optim_op) + @test isa(op.optimizer_options, Optim.Options) + + # testing loss_scale parameter + op_log = Options(; loss_scale=:log) + @test op_log.loss_scale == :log + + op_linear = Options(; 
loss_scale=:linear) + @test op_linear.loss_scale == :linear + + # test that invalid loss_scale values are caught + @test_throws AssertionError Options(; loss_scale=:invalid) + @test_throws AssertionError Options(; loss_scale=:cubic) +end + +@testitem "Test operators parameter conflicts" tags = [:part1] begin + using SymbolicRegression + using DynamicExpressions: OperatorEnum + + # Test that when operators is provided, we can't also provide individual sets + operators = OperatorEnum(1 => (sin, cos), 2 => (+, *, -)) + @test_throws AssertionError Options(; operators, binary_operators=(+, *)) + @test_throws AssertionError Options(; operators, unary_operators=(sin,)) + + # Test that when operators is provided, operator_enum_constructor should be nothing + @test_throws AssertionError Options(; operators, operator_enum_constructor=OperatorEnum) + + # Test that providing operators alone works fine (should not throw) + @test_nowarn Options(; operators) +end + +@testitem "Test operators stored globally" tags = [:part1] begin + using SymbolicRegression + using DynamicExpressions.OperatorEnumConstructionModule: LATEST_OPERATORS + + operators = OperatorEnum(1 => [sin, cos], 2 => [+, -, *], 3 => [fma], 5 => [max]) + options = Options(; operators) + + @test LATEST_OPERATORS[] == operators +end + +@testitem "Test with_max_degree_from_context" tags = [:part1] begin + using SymbolicRegression + + operators = OperatorEnum(1 => (sin, cos), 2 => (+, *, -)) + @test Options(; node_type=GraphNode, operators).node_type <: GraphNode{<:Any,2} + @test Options(; node_type=Node, operators).node_type <: Node{<:Any,2} + + operators = OperatorEnum(1 => (sin, cos), 2 => ()) + @test Options(; node_type=Node{<:Any,1}, operators).node_type <: Node{<:Any,1} + + @test Options().node_type <: Node{<:Any,2} + + operators = OperatorEnum(1 => (sin, cos), 2 => (+, *, -), 3 => (fma, max)) + options = Options(; operators) + @test options.node_type <: Node{<:Any,3} + @test options.op_constraints == + ([-1, -1], 
[(-1, -1), (-1, -1), (-1, -1)], [(-1, -1, -1), (-1, -1, -1)]) + @test options.nops == (2, 3, 2) +end + +@testitem "Test operator appears in multiple degrees error" tags = [:part1] begin + using SymbolicRegression + + operators = OperatorEnum(1 => (+, sin), 2 => (+, *)) # + appears in both degrees + + @test_throws( + "Operator + appears in multiple degrees. You can't use nested constraints.", + Options(; operators, nested_constraints=[(+) => [(+) => 0]]) + ) + + @test_throws( + "Operator + appears in multiple degrees. You can't use constraints.", + Options(; operators, constraints=[(+) => -1]) + ) +end + +@testitem "Test build_constraints with pre-processed vector format" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression.CoreModule.OptionsModule: build_constraints + using DynamicExpressions: OperatorEnum + + operators = OperatorEnum(1 => (sin, cos), 2 => (+, *, -), 5 => (max,)) + + constraints_processed = ( + [-1, -1], [(-1, -1), (-1, -1), (-1, -1)], nothing, nothing, [(-1, -1, -1, -1, -1)] + ) + + result = build_constraints(; + constraints=constraints_processed, operators_by_degree=operators.ops + ) + + # Verify the result matches expected format (fills empty slots with default values) + @test result == ( + [-1, -1], + [(-1, -1), (-1, -1), (-1, -1)], + NTuple{3,Int}[], + NTuple{4,Int}[], + [(-1, -1, -1, -1, -1)], + ) +end diff --git a/test/test_parametric_template_expressions.jl b/test/test_parametric_template_expressions.jl index e07415b72..bf32fb01a 100644 --- a/test/test_parametric_template_expressions.jl +++ b/test/test_parametric_template_expressions.jl @@ -29,8 +29,8 @@ end variable_names = ["x"] - # Error for missing parameters - @test_throws "Expected `parameters` to be provided" TemplateExpression( + # Test auto-initialization of parameters when not provided + expr_auto_init = TemplateExpression( (; f=ComposableExpression( Node{Float64}(; feature=1); operators=Options().operators, variable_names @@ -40,6 +40,10 @@ end 
operators=Options().operators, variable_names, ) + @test expr_auto_init isa TemplateExpression + @test haskey(get_metadata(expr_auto_init).parameters, :p) + @test length(get_metadata(expr_auto_init).parameters.p._data) == 2 + @test all(==(0.0), get_metadata(expr_auto_init).parameters.p._data) # Error for wrong parameter vector length @test_throws "Expected `parameters.p` to have length 2, got 1" TemplateExpression( @@ -199,8 +203,7 @@ end # Parametrized Template Expressions Template expressions in SymbolicRegression.jl can include parametric forms - expressions with tunable constants - that are optimized during the search. This can even include learn class-specific parameters that vary by category, - analogous to `ParametricExpression`s. + that are optimized during the search. This can even include class-specific parameters that vary by category. In this tutorial, we'll demonstrate how to use parametric template expressions to learn a model where: diff --git a/test/test_pretty_printing.jl b/test/test_pretty_printing.jl index a36a0f7e9..d09d07100 100644 --- a/test/test_pretty_printing.jl +++ b/test/test_pretty_printing.jl @@ -13,7 +13,7 @@ dataset = Dataset(X, y) member = PopMember(dataset, ex, options; deterministic=false) member.cost = 1.0 - @test member isa PopMember{Float64,Float64,<:Expression{Float64,Node{Float64}}} + @test member isa PopMember{Float64,Float64,<:Expression{Float64,<:Node{Float64}}} s_member = shower(member) @test s_member == "PopMember(tree = ((x ^ 2.0) + 1.5), loss = 16.25, cost = 1.0)" @@ -42,7 +42,7 @@ end dataset = Dataset(X, y) member = PopMember(dataset, ex, options; deterministic=false) member.cost = 1.0 - @test member isa PopMember{Float64,Float64,<:Expression{Float64,Node{Float64}}} + @test member isa PopMember{Float64,Float64,<:Expression{Float64,<:Node{Float64}}} hof = HallOfFame(options, dataset) hof = embed_metadata(hof, options, dataset) diff --git a/test/test_rotation.jl b/test/test_rotation.jl index 631cc107a..6882bf2de 100644 
--- a/test/test_rotation.jl +++ b/test/test_rotation.jl @@ -9,21 +9,29 @@ # No-op: @test randomly_rotate_tree!(x1) === x1 - expr = 1.5 * x1 + x2 + # There's also no change to a single op: + @test length(Set([randomly_rotate_tree!(x1 + x2) for _ in 1:100])) == 1 - # (+) -> ((*) -> (1.5, x1), x2) - # Should get rotated to - # (*) -> (1.5, (+) -> (x1, x2)) + expr = (1.5 * x1) + x2 - @test randomly_rotate_tree!(copy(expr)) == 1.5 * (x1 + x2) + # (+) -> ((*) -> (1.5, x1), x2) + # Should get rotated to one of + # (*) -> (1.5, (+) -> (x1, x2)) + # OR + # (*) -> ((+) -> (1.5, x1), x2) + # OR + # (*) -> ((+) -> (1.5, x2), x1) - # The only rotation option on this tree is to rotate back: - @test randomly_rotate_tree!(randomly_rotate_tree!(copy(expr))) == expr + for _ in 1:100 + @test randomly_rotate_tree!(copy(expr)) in + (1.5 * (x1 + x2), (1.5 + x1) * x2, (1.5 + x2) * x1) + end end @testitem "Complex `randomly_rotate_tree!`" tags = [:part1] begin using SymbolicRegression using SymbolicRegression.MutationFunctionsModule: randomly_rotate_tree! 
+ using Random: MersenneTwister # Create a simple binary tree structure directly options = Options(; binary_operators=(+, *, -, /), unary_operators=(cos, exp)) @@ -31,39 +39,57 @@ end expr = (1.5 * x1) + (2.5 / x3) - # Multiple rotations possible: - # (+) -> ((*) -> (1.5, x1), (/) -> (2.5, x3)) + # Multiple rotations possible for + # (+) -> ((*) -> (1.5, x1), (/) -> (2.5, x3)) + # This can either get rotated to - # (*) -> (1.5, (+) -> (x1, (/) -> (2.5, x3))) + # (*) -> (1.5, (+) -> (x1, (/) -> (2.5, x3))) # OR - # (/) -> ((+) -> ((*) -> (1.5, x1), 2.5), x3) + # (/) -> ((+) -> ((*) -> (1.5, x1), 2.5), x3) + # OR + # (*) -> ((+) -> (1.5, (/) -> (2.5, x3)), x1) + # OR + # (/) -> (2.5, (+) -> ((*) -> (1.5, x1), x3)) - outs = Set([randomly_rotate_tree!(copy(expr)) for _ in 1:100]) + rng = MersenneTwister(0) + outs = Set([randomly_rotate_tree!(copy(expr), rng) for _ in 1:300]) - @test outs == Set([((1.5 * x1) + 2.5) / x3, 1.5 * (x1 + (2.5 / x3))]) + @test outs == Set([ + 1.5 * (x1 + (2.5 / x3)), + ((1.5 * x1) + 2.5) / x3, + (1.5 + (2.5 / x3)) * x1, + 2.5 / ((1.5 * x1) + x3), + ]) # If we have a unary operator in the mix, both of these options are valid (with # the unary operator moved in). We also have a third option that rotates with # the unary operator acting as a pivot. 
expr = (1.5 * exp(x1)) + (2.5 / x3) - outs = Set([randomly_rotate_tree!(copy(expr)) for _ in 1:300]) + rng = MersenneTwister(0) + outs = Set([randomly_rotate_tree!(copy(expr), rng) for _ in 1:300]) @test outs == Set([ ((1.5 * exp(x1)) + 2.5) / x3, 1.5 * (exp(x1) + (2.5 / x3)), exp(1.5 * x1) + (2.5 / x3), + (1.5 + (2.5 / x3)) * exp(x1), + 2.5 / ((1.5 * exp(x1)) + x3), ]) - # Basically this third option does a rotation on the `*`: + # Note that we can do a rotation on the `*` _through_ the unary operator: # (*) -> (1.5, (exp) -> (x1,)) # to # (exp) -> ((*) -> (1.5, x1),) # Or, if the unary operator is at the top: expr = exp((1.5 * x1) + (2.5 / x3)) - outs = Set([randomly_rotate_tree!(copy(expr)) for _ in 1:300]) + rng = MersenneTwister(0) + outs = Set([randomly_rotate_tree!(copy(expr), rng) for _ in 1:500]) @test outs == Set([ + exp(1.5 * x1) + (2.5 / x3), + exp(2.5 / ((1.5 * x1) + x3)), exp(((1.5 * x1) + 2.5) / x3), exp(1.5 * (x1 + (2.5 / x3))), + exp((1.5 + (2.5 / x3)) * x1), # Rotate with `exp` as the *root*: (1.5 * x1) + exp(2.5 / x3), ]) diff --git a/test/test_template_expression.jl b/test/test_template_expression.jl index 356e3664d..ccf14eb09 100644 --- a/test/test_template_expression.jl +++ b/test/test_template_expression.jl @@ -612,7 +612,7 @@ end @test loss_batch ≈ expected_batch_loss end -@testitem "warning for loss_function with TemplateExpression" begin +@testitem "warning for loss_function with TemplateExpression" tags = [:part2] begin using SymbolicRegression @test_warn( @@ -624,3 +624,106 @@ end ) ) end + +@testitem "TemplateExpression guess validation" tags = [:part1] begin + using SymbolicRegression + using SymbolicRegression: parse_guesses, Dataset, PopMember + + # Create test data + X = Float64[1.0 2.0; 3.0 4.0] + y = Float64[5.0, 6.0] + dataset = Dataset(X, y) + template = @template_spec(expressions = (f, g)) do x1, x2 + f(x1, x2) + g(x1, x2) + end + options = Options(; expression_spec=template) + + # Test that using actual variable names throws 
an error + bad_guess = (; f="x1 + x2", g="x1 * x2") + @test_throws( + ArgumentError( + "Found variable name 'x1' in TemplateExpression guess. Use placeholder syntax '#1', '#2', etc., (for argument 1, 2, etc.) instead of actual variable names.", + ), + parse_guesses(PopMember{Float64,Float64}, [bad_guess], [dataset], options) + ) + + # Similar with custom variable names + dataset = Dataset(X, y; variable_names=["alpha", "beta"]) + bad_guess = (; f="alpha + beta", g="alpha * beta") + @test_throws( + ArgumentError( + "Found variable name 'alpha' in TemplateExpression guess. Use placeholder syntax '#1', '#2', etc., (for argument 1, 2, etc.) instead of actual variable names.", + ), + parse_guesses(PopMember{Float64,Float64}, [bad_guess], [dataset], options) + ) +end + +@testitem "Template expression return validation" tags = [:part2] begin + using SymbolicRegression: + TemplateReturnError, + ValidVector, + ComposableExpression, + TemplateStructure, + TemplateExpression + using DynamicExpressions: OperatorEnum, Node + + operators = OperatorEnum(; binary_operators=(+, *, /, -), unary_operators=(sin, cos)) + x1 = ComposableExpression(Node{Float64}(; feature=1); operators, variable_names=nothing) + + # Test that returning a regular vector from template expression throws TemplateReturnError + bad_structure = TemplateStructure{(:f,)}( + ((; f), (x,)) -> [1.0, 2.0]; # Returns regular Vector instead of ValidVector + num_features=(; f=1), + ) + bad_expr = TemplateExpression( + (; f=x1); structure=bad_structure, operators, variable_names=nothing + ) + X = [1.0 2.0]' + + function get_error_msg(err) + io = IOBuffer() + Base.showerror(io, err) + return String(take!(io)) + end + + err = @test_throws TemplateReturnError bad_expr(X) + msg = get_error_msg(err.value) + @test contains(msg, "Template expression returned a regular Vector") + @test contains(msg, "ValidVector is required") + @test contains(msg, "ValidVector(my_data, computation_is_valid)") +end + +@testitem "Test 
Float32/Float64 type conversion in TemplateExpression" tags = [:part2] begin + using SymbolicRegression + using SymbolicRegression: eval_loss + using SymbolicRegression.TemplateExpressionModule: _match_input_eltype + + template = @template_spec(expressions = (f,)) do x1, x2 + 0.5 * f(x1, x2) # 0.5 is Float64 literal + end + + options = Options(; binary_operators=[+, *, /, -], expression_spec=template) + x1 = ComposableExpression(Node{Float32}(; feature=1); operators=options.operators) + x2 = ComposableExpression(Node{Float32}(; feature=2); operators=options.operators) + f_expr = x1 + x2 + + template_expr = TemplateExpression( + (; f=f_expr); structure=template.structure, operators=options.operators + ) + + X = Float32[1.0 2.0; 3.0 4.0] + result = template_expr(X) + @test result isa Vector{Float32} + + y = Float32[2.0, 3.0] + dataset = Dataset(X, y) + loss = eval_loss(template_expr, dataset, options) + @test loss isa Float32 + @test loss ≈ 0.0 + + # Test _match_input_eltype coverage (covers lines 675-676) + result_f64 = [1.0, 2.0] + @test _match_input_eltype(Matrix{Float64}, result_f64) === result_f64 # Same type + result_int = [1, 2] + @test _match_input_eltype(Matrix{Float64}, result_int) === result_int # Non-float type +end diff --git a/test/test_template_macro.jl b/test/test_template_macro.jl index 5281bec75..190070464 100644 --- a/test/test_template_macro.jl +++ b/test/test_template_macro.jl @@ -188,3 +188,15 @@ end template_spec(:((x,) -> f(x)), :(expressions = (f,)), :extra_arg) ) end + +@testitem "Template macro with num_features parameter" tags = [:part1, :template_macro] begin + using SymbolicRegression + using DynamicExpressions: OperatorEnum, Node + + # Test template with num_features + expr_spec = @template_spec(expressions = (f,), num_features = (f=5,)) do x1, x2 + return x1^2 + f(x1, x2) # Normal inference would infer (f=2,) + end + + @test expr_spec.structure.num_features == (f=5,) +end From 8c033c9fa2e2b28a1aee32e696e70da02b02b91b Mon Sep 17 
00:00:00 2001 From: Atharva Sehgal Date: Fri, 10 Oct 2025 10:55:16 +0000 Subject: [PATCH 6/6] missing dependency --- test/test_units.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/test/test_units.jl b/test/test_units.jl index 9a9d0338b..05a2575e7 100644 --- a/test/test_units.jl +++ b/test/test_units.jl @@ -432,6 +432,7 @@ end @testitem "Miscellaneous tests of unit interface" tags = [:part3] begin using SymbolicRegression using DynamicQuantities + using MLJBase using SymbolicRegression.DimensionalAnalysisModule: @maybe_return_call, WildcardQuantity using SymbolicRegression.MLJInterfaceModule: unwrap_units_single using SymbolicRegression.InterfaceDynamicQuantitiesModule: get_dimensions_type