diff --git a/.editorconfig b/.editorconfig index 866a4a94..d6c21444 100644 --- a/.editorconfig +++ b/.editorconfig @@ -18,12 +18,213 @@ indent_size = 2 [*.{props,targets,ruleset,config,nuspec,resx,vsixmanifest,vsct}] indent_size = 2 +# XAML project files +[*.xaml] +indent_size = 2 + +# JSON files +[*.json] +indent_size = 4 + # Code files [*.{cs,csx,vb,vbx}] indent_size = 4 insert_final_newline = true charset = utf-8-bom +# +# Naming rules +# +dotnet_naming_rule.types_and_namespaces_should_be_pascalcase.severity = suggestion +dotnet_naming_rule.types_and_namespaces_should_be_pascalcase.symbols = types_and_namespaces +dotnet_naming_rule.types_and_namespaces_should_be_pascalcase.style = pascalcase + +dotnet_naming_rule.interfaces_should_be_ipascalcase.severity = suggestion +dotnet_naming_rule.interfaces_should_be_ipascalcase.symbols = interfaces +dotnet_naming_rule.interfaces_should_be_ipascalcase.style = ipascalcase + +dotnet_naming_rule.type_parameters_should_be_tpascalcase.severity = suggestion +dotnet_naming_rule.type_parameters_should_be_tpascalcase.symbols = type_parameters +dotnet_naming_rule.type_parameters_should_be_tpascalcase.style = tpascalcase + +dotnet_naming_rule.methods_should_be_pascalcase.severity = suggestion +dotnet_naming_rule.methods_should_be_pascalcase.symbols = methods +dotnet_naming_rule.methods_should_be_pascalcase.style = pascalcase + +dotnet_naming_rule.properties_should_be_pascalcase.severity = suggestion +dotnet_naming_rule.properties_should_be_pascalcase.symbols = properties +dotnet_naming_rule.properties_should_be_pascalcase.style = pascalcase + +dotnet_naming_rule.events_should_be_pascalcase.severity = suggestion +dotnet_naming_rule.events_should_be_pascalcase.symbols = events +dotnet_naming_rule.events_should_be_pascalcase.style = pascalcase + +dotnet_naming_rule.local_variables_should_be_camelcase.severity = suggestion +dotnet_naming_rule.local_variables_should_be_camelcase.symbols = local_variables 
+dotnet_naming_rule.local_variables_should_be_camelcase.style = camelcase + +# Differs from other repos like BaseApp since normal usage here +dotnet_naming_rule.local_constants_should_be_camelcase.severity = suggestion +dotnet_naming_rule.local_constants_should_be_camelcase.symbols = local_constants +dotnet_naming_rule.local_constants_should_be_camelcase.style = camelcase + +dotnet_naming_rule.parameters_should_be_camelcase.severity = suggestion +dotnet_naming_rule.parameters_should_be_camelcase.symbols = parameters +dotnet_naming_rule.parameters_should_be_camelcase.style = camelcase + +dotnet_naming_rule.public_fields_should_be_pascalcase.severity = suggestion +dotnet_naming_rule.public_fields_should_be_pascalcase.symbols = public_fields +dotnet_naming_rule.public_fields_should_be_pascalcase.style = pascalcase + +dotnet_naming_rule.private_fields_should_be_m_camelcase.severity = suggestion +dotnet_naming_rule.private_fields_should_be_m_camelcase.symbols = private_fields +dotnet_naming_rule.private_fields_should_be_m_camelcase.style = m_camelcase + +dotnet_naming_rule.public_constant_fields_should_be_pascalcase.severity = suggestion +dotnet_naming_rule.public_constant_fields_should_be_pascalcase.symbols = public_constant_fields +dotnet_naming_rule.public_constant_fields_should_be_pascalcase.style = pascalcase + +dotnet_naming_rule.private_constant_fields_should_be_pascalcase.severity = suggestion +dotnet_naming_rule.private_constant_fields_should_be_pascalcase.symbols = private_constant_fields +dotnet_naming_rule.private_constant_fields_should_be_pascalcase.style = pascalcase + +dotnet_naming_rule.public_static_readonly_fields_should_be_pascalcase.severity = suggestion +dotnet_naming_rule.public_static_readonly_fields_should_be_pascalcase.symbols = public_static_readonly_fields +dotnet_naming_rule.public_static_readonly_fields_should_be_pascalcase.style = pascalcase + +dotnet_naming_rule.private_static_readonly_fields_should_be_pascalcase.severity = suggestion 
+dotnet_naming_rule.private_static_readonly_fields_should_be_pascalcase.symbols = private_static_readonly_fields +dotnet_naming_rule.private_static_readonly_fields_should_be_pascalcase.style = pascalcase + +dotnet_naming_rule.private_static_fields_should_be_s_camelcase.severity = suggestion +dotnet_naming_rule.private_static_fields_should_be_s_camelcase.symbols = private_static_fields +dotnet_naming_rule.private_static_fields_should_be_s_camelcase.style = s_camelcase + +dotnet_naming_rule.enums_should_be_pascalcase.severity = suggestion +dotnet_naming_rule.enums_should_be_pascalcase.symbols = enums +dotnet_naming_rule.enums_should_be_pascalcase.style = pascalcase + +dotnet_naming_rule.local_functions_should_be_pascalcase.severity = suggestion +dotnet_naming_rule.local_functions_should_be_pascalcase.symbols = local_functions +dotnet_naming_rule.local_functions_should_be_pascalcase.style = pascalcase + +dotnet_naming_rule.non_field_members_should_be_pascalcase.severity = suggestion +dotnet_naming_rule.non_field_members_should_be_pascalcase.symbols = non_field_members +dotnet_naming_rule.non_field_members_should_be_pascalcase.style = pascalcase + +# +# Symbol specifications +# +dotnet_naming_symbols.interfaces.applicable_kinds = interface +dotnet_naming_symbols.interfaces.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.interfaces.required_modifiers = + +dotnet_naming_symbols.enums.applicable_kinds = enum +dotnet_naming_symbols.enums.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.enums.required_modifiers = + +dotnet_naming_symbols.events.applicable_kinds = event +dotnet_naming_symbols.events.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.events.required_modifiers = + +dotnet_naming_symbols.methods.applicable_kinds = method 
+dotnet_naming_symbols.methods.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.methods.required_modifiers = + +dotnet_naming_symbols.properties.applicable_kinds = property +dotnet_naming_symbols.properties.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.properties.required_modifiers = + +dotnet_naming_symbols.public_fields.applicable_kinds = field +dotnet_naming_symbols.public_fields.applicable_accessibilities = public, internal +dotnet_naming_symbols.public_fields.required_modifiers = + +dotnet_naming_symbols.private_fields.applicable_kinds = field +dotnet_naming_symbols.private_fields.applicable_accessibilities = private, protected, protected_internal, private_protected +dotnet_naming_symbols.private_fields.required_modifiers = + +dotnet_naming_symbols.private_static_fields.applicable_kinds = field +dotnet_naming_symbols.private_static_fields.applicable_accessibilities = private, protected, protected_internal, private_protected +dotnet_naming_symbols.private_static_fields.required_modifiers = static + +dotnet_naming_symbols.types_and_namespaces.applicable_kinds = namespace, class, struct, interface, enum +dotnet_naming_symbols.types_and_namespaces.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.types_and_namespaces.required_modifiers = + +dotnet_naming_symbols.non_field_members.applicable_kinds = property, event, method +dotnet_naming_symbols.non_field_members.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.non_field_members.required_modifiers = + +dotnet_naming_symbols.type_parameters.applicable_kinds = type_parameter +dotnet_naming_symbols.type_parameters.applicable_accessibilities = * +dotnet_naming_symbols.type_parameters.required_modifiers = + 
+dotnet_naming_symbols.private_constant_fields.applicable_kinds = field +dotnet_naming_symbols.private_constant_fields.applicable_accessibilities = private, protected, protected_internal, private_protected +dotnet_naming_symbols.private_constant_fields.required_modifiers = const + +dotnet_naming_symbols.local_variables.applicable_kinds = local +dotnet_naming_symbols.local_variables.applicable_accessibilities = local +dotnet_naming_symbols.local_variables.required_modifiers = + +dotnet_naming_symbols.local_constants.applicable_kinds = local +dotnet_naming_symbols.local_constants.applicable_accessibilities = local +dotnet_naming_symbols.local_constants.required_modifiers = const + +dotnet_naming_symbols.parameters.applicable_kinds = parameter +dotnet_naming_symbols.parameters.applicable_accessibilities = * +dotnet_naming_symbols.parameters.required_modifiers = + +dotnet_naming_symbols.public_constant_fields.applicable_kinds = field +dotnet_naming_symbols.public_constant_fields.applicable_accessibilities = public, internal +dotnet_naming_symbols.public_constant_fields.required_modifiers = const + +dotnet_naming_symbols.public_static_readonly_fields.applicable_kinds = field +dotnet_naming_symbols.public_static_readonly_fields.applicable_accessibilities = public, internal +dotnet_naming_symbols.public_static_readonly_fields.required_modifiers = readonly, static + +dotnet_naming_symbols.private_static_readonly_fields.applicable_kinds = field +dotnet_naming_symbols.private_static_readonly_fields.applicable_accessibilities = private, protected, protected_internal, private_protected +dotnet_naming_symbols.private_static_readonly_fields.required_modifiers = readonly, static + +dotnet_naming_symbols.local_functions.applicable_kinds = local_function +dotnet_naming_symbols.local_functions.applicable_accessibilities = * +dotnet_naming_symbols.local_functions.required_modifiers = + +# +# Naming styles +# +dotnet_naming_style.pascalcase.required_prefix = 
+dotnet_naming_style.pascalcase.required_suffix = +dotnet_naming_style.pascalcase.word_separator = +dotnet_naming_style.pascalcase.capitalization = pascal_case + +dotnet_naming_style.ipascalcase.required_prefix = I +dotnet_naming_style.ipascalcase.required_suffix = +dotnet_naming_style.ipascalcase.word_separator = +dotnet_naming_style.ipascalcase.capitalization = pascal_case + +dotnet_naming_style.tpascalcase.required_prefix = T +dotnet_naming_style.tpascalcase.required_suffix = +dotnet_naming_style.tpascalcase.word_separator = +dotnet_naming_style.tpascalcase.capitalization = pascal_case + +dotnet_naming_style.m_camelcase.required_prefix = m_ +dotnet_naming_style.m_camelcase.required_suffix = +dotnet_naming_style.m_camelcase.word_separator = +dotnet_naming_style.m_camelcase.capitalization = camel_case + +dotnet_naming_style.camelcase.required_prefix = +dotnet_naming_style.camelcase.required_suffix = +dotnet_naming_style.camelcase.word_separator = +dotnet_naming_style.camelcase.capitalization = camel_case + +dotnet_naming_style.s_camelcase.required_prefix = s_ +dotnet_naming_style.s_camelcase.required_suffix = +dotnet_naming_style.s_camelcase.word_separator = +dotnet_naming_style.s_camelcase.capitalization = camel_case + + # C# files [*.cs] @@ -52,6 +253,7 @@ csharp_new_line_before_open_brace = accessors, lambdas, types, methods, properti #sort System.* using directives alphabetically, and place them before other usings dotnet_sort_system_directives_first = true +dotnet_style_prefer_collection_expression = true csharp_style_namespace_declarations = file_scoped:warning @@ -128,53 +330,6 @@ dotnet_style_require_accessibility_modifiers = omit_if_default:error # Ensure readonly fields are marked as such dotnet_style_readonly_field = true:error -# Prefix private members with underscore -dotnet_naming_rule.private_members_with_underscore.symbols = private_members -dotnet_naming_rule.private_members_with_underscore.style = prefix_underscore 
-dotnet_naming_rule.private_members_with_underscore.severity = suggestion - -dotnet_naming_symbols.private_members.applicable_kinds = field -dotnet_naming_symbols.private_members.applicable_accessibilities = private -dotnet_naming_symbols.private_members.required_modifiers = readonly - -dotnet_naming_style.prefix_underscore.capitalization = camel_case -dotnet_naming_style.prefix_underscore.required_prefix = m_ -dotnet_naming_style.prefix_underscore.required_suffix = -dotnet_naming_style.prefix_underscore.word_separator = - -# Non-public instance fields are camelCase and start with m_ -dotnet_naming_rule.instance_fields_should_be_camel_case.severity = suggestion -dotnet_naming_rule.instance_fields_should_be_camel_case.symbols = instance_fields -dotnet_naming_rule.instance_fields_should_be_camel_case.style = instance_field_style - -dotnet_naming_symbols.instance_fields.applicable_kinds = field -dotnet_naming_symbols.instance_fields.applicable_accessibilities = internal, protected, protected_internal, private_protected, private - -dotnet_naming_style.instance_field_style.capitalization = camel_case -dotnet_naming_style.instance_field_style.required_prefix = m_ - -# static readonly PascalCase -dotnet_naming_rule.static_readonly_pascal_case.symbols = static_readonly -dotnet_naming_rule.static_readonly_pascal_case.style = pascal_case -dotnet_naming_rule.static_readonly_pascal_case.severity = suggestion - -dotnet_naming_style.static_readonly.applicable_kinds = field -dotnet_naming_symbols.static_readonly.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected -dotnet_naming_symbols.static_readonly.required_modifiers = static, readonly - -dotnet_naming_style.pascal_case.capitalization = pascal_case - -# const PascalCase -dotnet_naming_rule.const_pascal_case.symbols = all_const -dotnet_naming_rule.const_pascal_case.style = pascal_case -dotnet_naming_rule.const_pascal_case.severity = suggestion - 
-dotnet_naming_style.all_const.applicable_kinds = field -dotnet_naming_symbols.all_const.applicable_accessibilities = public, internal, protected, protected_internal, private_protected, private -dotnet_naming_symbols.all_const.required_modifiers = const - -dotnet_naming_style.pascal_case.capitalization = pascal_case - # Null-checking preferences csharp_style_throw_expression = true:error @@ -205,6 +360,9 @@ dotnet_diagnostic.IDE0051.severity = warning # IDE0066: Convert switch statement to expression dotnet_diagnostic.IDE0066.severity = silent +# IDE1006: Naming Styles +dotnet_diagnostic.IDE1006.severity = warning + # CA2211: Non-constant fields should not be visible dotnet_diagnostic.CA2211.severity = warning @@ -254,7 +412,7 @@ dotnet_diagnostic.CA1851.severity = warning dotnet_diagnostic.CA1834.severity = warning # CA2007: Do not directly await a Task -dotnet_diagnostic.CA2007.severity = warning +dotnet_diagnostic.CA2007.severity = silent # CA2008: Do not create tasks without passing a TaskScheduler dotnet_diagnostic.CA2008.severity = warning @@ -284,7 +442,7 @@ dotnet_diagnostic.IDE0044.severity = warning dotnet_diagnostic.IDE0005.severity = warning # CA1002: Do not expose generic lists (don't follow suggestion text prefer IReadOnlyList) -dotnet_diagnostic.CA1002.severity = suggestion +dotnet_diagnostic.CA1002.severity = none # CA1003: Use generic event handler instances (never follow this - it's dumb coupling) dotnet_diagnostic.CA1003.severity = none # CA1005: Avoid excessive parameters on generic types (rarely relevant) @@ -303,20 +461,16 @@ dotnet_diagnostic.CA1031.severity = none dotnet_diagnostic.CA1032.severity = none # CA1040: Avoid empty interfaces (completely ignore this, empty marker interfaces are fine) dotnet_diagnostic.CA1040.severity = none +# CA1045: Do not pass types by reference +dotnet_diagnostic.CA1045.severity = none # CA1060: Move P/Invokes to NativeMethods class (a bit pedantic so not enforced) dotnet_diagnostic.CA1060.severity = none # 
CA1062: Validate arguments of public methods (perhaps relevant in widely used class library but less so at app level) dotnet_diagnostic.CA1062.severity = none # CA1305: Specify IFormatProvider (while definitely relevant in some cases also overly pedantic for all) dotnet_diagnostic.CA1305.severity = none -# CA1307: Specify StringComparison for clarity. -dotnet_diagnostic.CA1307.severity = none # CA1308: Normalize strings to uppercase (this will flag weird places and is considered irrelevant) dotnet_diagnostic.CA1308.severity = none -# CA1309: Use ordinal string comparison -dotnet_diagnostic.CA1309.severity = none -# CA1310: Specify StringComparison for correctness -dotnet_diagnostic.CA1310.severity = none # CA1501: Avoid excessive inheritance (too pedantic and view models will often violate it) dotnet_diagnostic.CA1501.severity = none # CA1506: Avoid excessive class coupling (composition/Autofac code intentionally violates this) @@ -352,11 +506,937 @@ dotnet_diagnostic.CA2225.severity = none # CA5394: Do not use insecure randomness (we don't use randomness for security) dotnet_diagnostic.CA5394.severity = none -# CS1573: Parameter 'parameter' has no matching param tag in the XML comment for 'parameter' (but other parameters do) +# CS1573: Parameter has no matching param tag in the XML comment (but other parameters do) dotnet_diagnostic.CS1573.severity = none -# CS1591: Missing XML comment for publicly visible type or member 'Type_or_Member' (we don't document everything) -dotnet_diagnostic.CS1591.severity = none -# SkiaSharp only contains distribution for net7-win-x64 so get a warning - trying to disable -# NETSDK1206: Found version-specific or distribution-specific runtime identifier(s) -dotnet_diagnostic.NETSDK1206.severity = none +# IDE0300: Use collection expression for array +dotnet_diagnostic.IDE0300.severity = warning +# IDE0301: Use collection expression for empty +dotnet_diagnostic.IDE0301.severity = warning + +# +# Roslynator 
(https://josefpihrt.github.io/docs/roslynator/configuration/) +# + +# Options + +roslynator_accessibility_modifiers = implicit +# Applicable to: rcs1018 + +roslynator_accessor_braces_style = single_line_when_expression_is_on_single_line +# Default: multi_line +# Applicable to: rcs0020 + +roslynator_array_creation_type_style = implicit +# Applicable to: rcs1014 + +roslynator_arrow_token_new_line = after +# Applicable to: rcs0032 + +roslynator_binary_operator_new_line = after +# Applicable to: rcs0027 + +roslynator_blank_line_after_file_scoped_namespace_declaration = true +# Applicable to: rcs0060 + +roslynator_blank_line_between_closing_brace_and_switch_section = true +# Applicable to: rcs0014, rcs1036 + +roslynator_blank_line_between_single_line_accessors = false +# Applicable to: rcs0011 + +roslynator_blank_line_between_switch_sections = omit +# Applicable to: rcs0061 + +roslynator_blank_line_between_using_directives = never +# Applicable to: rcs0015 + +roslynator_block_braces_style = single_line_when_empty +# Default: multi_line +# Applicable to: rcs0021 + +roslynator_body_style = expression +# Applicable to: rcs1016 + +roslynator_conditional_operator_condition_parentheses_style = omit_when_condition_is_single_token +# Applicable to: rcs1051 + +roslynator_conditional_operator_new_line = after +# Applicable to: rcs0028 + +roslynator_configure_await = false +# Applicable to: rcs1090 + +roslynator_doc_comment_summary_style = multi_line +# Applicable to: rcs1253 + +roslynator_empty_string_style = field +# Applicable to: rcs1078 + +# NOT ACTIVE +#roslynator_enum_flag_value_style = decimal_number +# Applicable to: rcs1254 + +roslynator_enum_has_flag_style = operator +# Applicable to: rcs1096 + +roslynator_equals_token_new_line = after +# Applicable to: rcs0052 + +roslynator_infinite_loop_style = while +# Applicable to: rcs1252 + +roslynator_max_line_length = 120 +# Default: 140 +# Applicable to: rcs0056 + +roslynator_new_line_at_end_of_file = true +# Applicable to: 
rcs0058 + +roslynator_new_line_before_while_in_do_statement = false +# Applicable to: rcs0051 + +roslynator_null_check_style = pattern_matching +# Applicable to: rcs1248 + +roslynator_null_conditional_operator_new_line = before +# Applicable to: rcs0059 + +roslynator_object_creation_parentheses_style = omit +# Applicable to: rcs1050 + +roslynator_object_creation_type_style = implicit +# Applicable to: rcs1250 + +roslynator_prefix_field_identifier_with_underscore = false + +roslynator_tab_length = 4 +# Default: 4 +# Applicable to: rcs0056 + +roslynator_trailing_comma_style = omit_when_single_line +# Applicable to: rcs1260 + +roslynator_unity_code_analysis.enabled = false +# Applicable to: rcs1169, rcs1213 + +roslynator_use_anonymous_function_or_method_group = method_group +# Applicable to: rcs1207 + +roslynator_use_block_body_when_declaration_spans_over_multiple_lines = true +# Applicable to: rcs1016 + +roslynator_use_block_body_when_expression_spans_over_multiple_lines = true +# Applicable to: rcs1016 + +roslynator_use_collection_expression = true +# Applicable to: rcs1014, rcs1250 + +roslynator_use_var = always +# Applicable to: rcs1264 + +roslynator_use_var_instead_of_implicit_object_creation = true +# Applicable to: rcs1250 + +# Analyzers + +# Add blank line after embedded statement +dotnet_diagnostic.rcs0001.severity = none + +# Add blank line after #region +dotnet_diagnostic.rcs0002.severity = none + +# Add blank line after using directive list +dotnet_diagnostic.rcs0003.severity = warning + +# Add blank line before #endregion +dotnet_diagnostic.rcs0005.severity = none + +# Add blank line before using directive list +dotnet_diagnostic.rcs0006.severity = none + +# Add blank line between accessors +dotnet_diagnostic.rcs0007.severity = none + +# Add blank line between closing brace and next statement +dotnet_diagnostic.rcs0008.severity = none + +# Add blank line between declaration and documentation comment +dotnet_diagnostic.rcs0009.severity = none + +# Add 
blank line between declarations +dotnet_diagnostic.rcs0010.severity = none + +# Add/remove blank line between single-line accessors +dotnet_diagnostic.rcs0011.severity = none +# Options: roslynator_blank_line_between_single_line_accessors + +# Add blank line between single-line declarations +dotnet_diagnostic.rcs0012.severity = none + +# Add blank line between single-line declarations of different kind +dotnet_diagnostic.rcs0013.severity = none + +# Add/remove blank line between using directives +dotnet_diagnostic.rcs0015.severity = none +# Options: roslynator_blank_line_between_using_directives + +# Put attribute list on its own line +dotnet_diagnostic.rcs0016.severity = none + +# Format accessor's braces on a single line or multiple lines +dotnet_diagnostic.rcs0020.severity = none +# Options: roslynator_accessor_braces_style + +# Format block's braces on a single line or multiple lines +dotnet_diagnostic.rcs0021.severity = none +# Options: roslynator_block_braces_style + +# Format type declaration's braces +dotnet_diagnostic.rcs0023.severity = none + +# Add new line after switch label +dotnet_diagnostic.rcs0024.severity = none + +# Put full accessor on its own line +dotnet_diagnostic.rcs0025.severity = none + +# Place new line after/before binary operator +dotnet_diagnostic.rcs0027.severity = none +# Options: roslynator_binary_operator_new_line + +# Place new line after/before '?:' operator +dotnet_diagnostic.rcs0028.severity = none +# Options: roslynator_conditional_operator_new_line + +# Put constructor initializer on its own line +dotnet_diagnostic.rcs0029.severity = none + +# Put embedded statement on its own line +dotnet_diagnostic.rcs0030.severity = none + +# Put enum member on its own line +dotnet_diagnostic.rcs0031.severity = none + +# Place new line after/before arrow token +dotnet_diagnostic.rcs0032.severity = none +# Options: roslynator_arrow_token_new_line + +# Put statement on its own line +dotnet_diagnostic.rcs0033.severity = none + +# Put type 
parameter constraint on its own line +dotnet_diagnostic.rcs0034.severity = none + +# Remove blank line between single-line declarations of same kind +dotnet_diagnostic.rcs0036.severity = none + +# Remove new line before base list +dotnet_diagnostic.rcs0039.severity = none + +# Remove new line between 'if' keyword and 'else' keyword +dotnet_diagnostic.rcs0041.severity = none + +# Put auto-accessors on a single line +dotnet_diagnostic.rcs0042.severity = none + +# Use carriage return + linefeed as new line +dotnet_diagnostic.rcs0044.severity = none + +# Use linefeed as new line +dotnet_diagnostic.rcs0045.severity = none + +# Use spaces instead of tab +dotnet_diagnostic.rcs0046.severity = warning + +# Put initializer on a single line +dotnet_diagnostic.rcs0048.severity = none + +# Add blank line after top comment +dotnet_diagnostic.rcs0049.severity = none + +# Add blank line before top declaration +dotnet_diagnostic.rcs0050.severity = none + +# Add/remove new line before 'while' in 'do' statement +dotnet_diagnostic.rcs0051.severity = none +# Options: roslynator_new_line_before_while_in_do_statement + +# Place new line after/before equals token +dotnet_diagnostic.rcs0052.severity = none +# Options: roslynator_equals_token_new_line + +# Fix formatting of a list +dotnet_diagnostic.rcs0053.severity = none + +# Fix formatting of a call chain +dotnet_diagnostic.rcs0054.severity = none + +# Fix formatting of a binary expression chain +dotnet_diagnostic.rcs0055.severity = none + +# A line is too long +dotnet_diagnostic.rcs0056.severity = none +# Options: roslynator_max_line_length, roslynator_tab_length + +# Normalize whitespace at the beginning of a file +dotnet_diagnostic.rcs0057.severity = none + +# Normalize whitespace at the end of a file +dotnet_diagnostic.rcs0058.severity = none +# Options: roslynator_new_line_at_end_of_file + +# Place new line after/before null-conditional operator +dotnet_diagnostic.rcs0059.severity = none +# Options: 
roslynator_null_conditional_operator_new_line + +# Add/remove line after file scoped namespace declaration +dotnet_diagnostic.rcs0060.severity = none +# Options: roslynator_blank_line_after_file_scoped_namespace_declaration + +# Add/remove blank line between switch sections +dotnet_diagnostic.rcs0061.severity = none +# Options: roslynator_blank_line_between_switch_sections + +# Add braces (when expression spans over multiple lines) +dotnet_diagnostic.rcs1001.severity = error + +# Remove braces +dotnet_diagnostic.rcs1002.severity = none + +# Add braces to if-else (when expression spans over multiple lines) +dotnet_diagnostic.rcs1003.severity = error + +# Remove braces from if-else +dotnet_diagnostic.rcs1004.severity = none + +# Simplify nested using statement +dotnet_diagnostic.rcs1005.severity = silent + +# Merge 'else' with nested 'if' +dotnet_diagnostic.rcs1006.severity = silent + +# Add braces +dotnet_diagnostic.rcs1007.severity = none + +# Use predefined type +dotnet_diagnostic.rcs1013.severity = none + +# Use explicitly/implicitly typed array +dotnet_diagnostic.rcs1014.severity = error +# Options: roslynator_array_creation_type_style, roslynator_use_collection_expression + +# Use nameof operator +dotnet_diagnostic.rcs1015.severity = warning + +# Use block body or expression body +dotnet_diagnostic.rcs1016.severity = none +# Options: roslynator_body_style, roslynator_use_block_body_when_declaration_spans_over_multiple_lines, roslynator_use_block_body_when_expression_spans_over_multiple_lines + +# Add/remove accessibility modifiers +dotnet_diagnostic.rcs1018.severity = none +# Options: roslynator_accessibility_modifiers + +# Order modifiers +dotnet_diagnostic.rcs1019.severity = none + +# Simplify Nullable to T? 
+dotnet_diagnostic.rcs1020.severity = error + +# Convert lambda expression body to expression body +dotnet_diagnostic.rcs1021.severity = warning + +# Remove unnecessary braces in switch section +dotnet_diagnostic.rcs1031.severity = silent + +# Remove redundant parentheses +dotnet_diagnostic.rcs1032.severity = warning + +# Remove redundant boolean literal +dotnet_diagnostic.rcs1033.severity = error + +# Remove redundant 'sealed' modifier +dotnet_diagnostic.rcs1034.severity = silent + +# Remove unnecessary blank line +dotnet_diagnostic.rcs1036.severity = error +# Options: roslynator_blank_line_between_closing_brace_and_switch_section + +# Remove trailing white-space +dotnet_diagnostic.rcs1037.severity = warning + +# Remove argument list from attribute +dotnet_diagnostic.rcs1039.severity = silent + +# Remove enum default underlying type +dotnet_diagnostic.rcs1042.severity = silent + +# Remove 'partial' modifier from type with a single part +dotnet_diagnostic.rcs1043.severity = silent + +# Remove original exception from throw statement +dotnet_diagnostic.rcs1044.severity = warning + +# Asynchronous method name should end with 'Async' +dotnet_diagnostic.rcs1046.severity = none + +# Non-asynchronous method name should not end with 'Async' +dotnet_diagnostic.rcs1047.severity = error + +# Use lambda expression instead of anonymous method +dotnet_diagnostic.rcs1048.severity = error + +# Simplify boolean comparison +dotnet_diagnostic.rcs1049.severity = error + +# Include/omit parentheses when creating new object +dotnet_diagnostic.rcs1050.severity = none +# Options: roslynator_object_creation_parentheses_style + +# Add/remove parentheses from condition in conditional operator +dotnet_diagnostic.rcs1051.severity = none +# Options: roslynator_conditional_operator_condition_parentheses_style + +# Declare each attribute separately +dotnet_diagnostic.rcs1052.severity = none + +# Unnecessary semicolon at the end of declaration +dotnet_diagnostic.rcs1055.severity = silent + +# 
Avoid usage of using alias directive +dotnet_diagnostic.rcs1056.severity = none + +# Use compound assignment +dotnet_diagnostic.rcs1058.severity = error + +# Avoid locking on publicly accessible instance +dotnet_diagnostic.rcs1059.severity = warning + +# Declare each type in separate file +dotnet_diagnostic.rcs1060.severity = none + +# Merge 'if' with nested 'if' +dotnet_diagnostic.rcs1061.severity = silent + +# Simplify logical negation +dotnet_diagnostic.rcs1068.severity = error + +# Remove unnecessary case label +dotnet_diagnostic.rcs1069.severity = silent + +# Remove redundant default switch section +dotnet_diagnostic.rcs1070.severity = silent + +# Remove redundant base constructor call +dotnet_diagnostic.rcs1071.severity = silent + +# Convert 'if' to 'return' statement +dotnet_diagnostic.rcs1073.severity = error + +# Remove redundant constructor +dotnet_diagnostic.rcs1074.severity = silent + +# Avoid empty catch clause that catches System.Exception +dotnet_diagnostic.rcs1075.severity = warning + +# Optimize LINQ method call +dotnet_diagnostic.rcs1077.severity = error + +# Use "" or 'string.Empty' +dotnet_diagnostic.rcs1078.severity = none +# Options: roslynator_empty_string_style + +# Throwing of new NotImplementedException +dotnet_diagnostic.rcs1079.severity = none + +# Use 'Count/Length' property instead of 'Any' method +dotnet_diagnostic.rcs1080.severity = none + +# Split variable declaration +dotnet_diagnostic.rcs1081.severity = none + +# Use coalesce expression instead of conditional expression +dotnet_diagnostic.rcs1084.severity = error + +# Use auto-implemented property +dotnet_diagnostic.rcs1085.severity = error + +# Use --/++ operator instead of assignment +dotnet_diagnostic.rcs1089.severity = error + +# Add/remove 'ConfigureAwait(false)' call +dotnet_diagnostic.rcs1090.severity = none +# Options: roslynator_configure_await + +# File contains no code +dotnet_diagnostic.rcs1093.severity = error + +# Declare using directive on top level 
+dotnet_diagnostic.rcs1094.severity = none + +# Use 'HasFlag' method or bitwise operator +dotnet_diagnostic.rcs1096.severity = none +# Options: roslynator_enum_has_flag_style + +# Remove redundant 'ToString' call +dotnet_diagnostic.rcs1097.severity = error + +# Constant values should be placed on right side of comparisons +dotnet_diagnostic.rcs1098.severity = error + +# Default label should be the last label in a switch section +dotnet_diagnostic.rcs1099.severity = error + +# Make class static +dotnet_diagnostic.rcs1102.severity = warning + +# Convert 'if' to assignment +dotnet_diagnostic.rcs1103.severity = error + +# Simplify conditional expression +dotnet_diagnostic.rcs1104.severity = error + +# Unnecessary interpolation +dotnet_diagnostic.rcs1105.severity = error + +# Remove redundant 'ToCharArray' call +dotnet_diagnostic.rcs1107.severity = error + +# Add 'static' modifier to all partial class declarations +dotnet_diagnostic.rcs1108.severity = error + +# Declare type inside namespace +dotnet_diagnostic.rcs1110.severity = suggestion + +# Add braces to switch section with multiple statements +dotnet_diagnostic.rcs1111.severity = none + +# Combine 'Enumerable.Where' method chain +dotnet_diagnostic.rcs1112.severity = error + +# Use 'string.IsNullOrEmpty' method +dotnet_diagnostic.rcs1113.severity = error + +# Remove redundant delegate creation +dotnet_diagnostic.rcs1114.severity = error + +# Mark local variable as const +dotnet_diagnostic.rcs1118.severity = none + +# Add parentheses when necessary +dotnet_diagnostic.rcs1123.severity = none + +# Inline local variable +dotnet_diagnostic.rcs1124.severity = none + +# Add braces to if-else +dotnet_diagnostic.rcs1126.severity = none + +# Use coalesce expression +dotnet_diagnostic.rcs1128.severity = error + +# Remove redundant field initialization +dotnet_diagnostic.rcs1129.severity = silent + +# Bitwise operation on enum without Flags attribute +dotnet_diagnostic.rcs1130.severity = error + +# Remove redundant overriding 
member +dotnet_diagnostic.rcs1132.severity = error + +# Remove redundant Dispose/Close call +dotnet_diagnostic.rcs1133.severity = silent + +# Remove redundant statement +dotnet_diagnostic.rcs1134.severity = silent + +# Declare enum member with zero value (when enum has FlagsAttribute) +dotnet_diagnostic.rcs1135.severity = error + +# Merge switch sections with equivalent content +dotnet_diagnostic.rcs1136.severity = silent + +# Add summary to documentation comment +dotnet_diagnostic.rcs1138.severity = warning + +# Add summary element to documentation comment +dotnet_diagnostic.rcs1139.severity = suggestion + +# Add exception to documentation comment +dotnet_diagnostic.rcs1140.severity = silent + +# Add 'param' element to documentation comment +dotnet_diagnostic.rcs1141.severity = silent + +# Add 'typeparam' element to documentation comment +dotnet_diagnostic.rcs1142.severity = silent + +# Simplify coalesce expression +dotnet_diagnostic.rcs1143.severity = silent + +# Remove redundant 'as' operator +dotnet_diagnostic.rcs1145.severity = silent + +# Use conditional access +dotnet_diagnostic.rcs1146.severity = error + +# Remove redundant cast +dotnet_diagnostic.rcs1151.severity = silent + +# Sort enum members +dotnet_diagnostic.rcs1154.severity = error + +# Use StringComparison when comparing strings +dotnet_diagnostic.rcs1155.severity = warning + +# Use string.Length instead of comparison with empty string +dotnet_diagnostic.rcs1156.severity = error + +# Composite enum value contains undefined flag +dotnet_diagnostic.rcs1157.severity = error + +# Static member in generic type should use a type parameter +dotnet_diagnostic.rcs1158.severity = error + +# Use EventHandler +dotnet_diagnostic.rcs1159.severity = error + +# Abstract type should not have public constructors +dotnet_diagnostic.rcs1160.severity = error + +# Enum should declare explicit values +dotnet_diagnostic.rcs1161.severity = silent + +# Avoid chain of assignments +dotnet_diagnostic.rcs1162.severity = none + 
+# Unused parameter +dotnet_diagnostic.rcs1163.severity = none + +# Unused type parameter +dotnet_diagnostic.rcs1164.severity = error + +# Unconstrained type parameter checked for null +dotnet_diagnostic.rcs1165.severity = silent + +# Value type object is never equal to null +dotnet_diagnostic.rcs1166.severity = error + +# Parameter name differs from base name +dotnet_diagnostic.rcs1168.severity = silent + +# Make field read-only +dotnet_diagnostic.rcs1169.severity = error +# Options: roslynator_unity_code_analysis.enabled + +# Use read-only auto-implemented property +dotnet_diagnostic.rcs1170.severity = error + +# Simplify lazy initialization +dotnet_diagnostic.rcs1171.severity = error + +# Use 'is' operator instead of 'as' operator +dotnet_diagnostic.rcs1172.severity = warning + +# Use coalesce expression instead of 'if' +dotnet_diagnostic.rcs1173.severity = error + +# Remove redundant async/await +dotnet_diagnostic.rcs1174.severity = none + +# Unused 'this' parameter +dotnet_diagnostic.rcs1175.severity = error + +# Unnecessary assignment +dotnet_diagnostic.rcs1179.severity = error + +# Inline lazy initialization +dotnet_diagnostic.rcs1180.severity = error + +# Convert comment to documentation comment +dotnet_diagnostic.rcs1181.severity = silent + +# Remove redundant base interface +dotnet_diagnostic.rcs1182.severity = silent + +# Use Regex instance instead of static method +dotnet_diagnostic.rcs1186.severity = silent + +# Use constant instead of field +dotnet_diagnostic.rcs1187.severity = error + +# Remove redundant auto-property initialization +dotnet_diagnostic.rcs1188.severity = silent + +# Add or remove region name +dotnet_diagnostic.rcs1189.severity = silent + +# Join string expressions +dotnet_diagnostic.rcs1190.severity = error + +# Declare enum value as combination of names +dotnet_diagnostic.rcs1191.severity = error + +# Unnecessary usage of verbatim string literal +dotnet_diagnostic.rcs1192.severity = warning + +# Overriding member should not change 
'params' modifier +dotnet_diagnostic.rcs1193.severity = warning + +# Implement exception constructors +dotnet_diagnostic.rcs1194.severity = none + +# Use ^ operator +dotnet_diagnostic.rcs1195.severity = error + +# Call extension method as instance method +dotnet_diagnostic.rcs1196.severity = warning + +# Optimize StringBuilder.Append/AppendLine call +dotnet_diagnostic.rcs1197.severity = error + +# Avoid unnecessary boxing of value type +dotnet_diagnostic.rcs1198.severity = none + +# Unnecessary null check +dotnet_diagnostic.rcs1199.severity = error + +# Call 'Enumerable.ThenBy' instead of 'Enumerable.OrderBy' +dotnet_diagnostic.rcs1200.severity = error + +# Use method chaining +dotnet_diagnostic.rcs1201.severity = silent + +# Avoid NullReferenceException +dotnet_diagnostic.rcs1202.severity = error + +# Use AttributeUsageAttribute +dotnet_diagnostic.rcs1203.severity = warning + +# Use EventArgs.Empty +dotnet_diagnostic.rcs1204.severity = error + +# Order named arguments according to the order of parameters +dotnet_diagnostic.rcs1205.severity = error + +# Use conditional access instead of conditional expression +dotnet_diagnostic.rcs1206.severity = error + +# Use anonymous function or method group +dotnet_diagnostic.rcs1207.severity = none +# Options: roslynator_use_anonymous_function_or_method_group + +# Reduce 'if' nesting +dotnet_diagnostic.rcs1208.severity = none + +# Order type parameter constraints +dotnet_diagnostic.rcs1209.severity = error + +# Return completed task instead of returning null +dotnet_diagnostic.rcs1210.severity = warning + +# Remove unnecessary 'else' +dotnet_diagnostic.rcs1211.severity = silent + +# Remove redundant assignment +dotnet_diagnostic.rcs1212.severity = suggestion + +# Remove unused member declaration +dotnet_diagnostic.rcs1213.severity = suggestion +# Options: roslynator_unity_code_analysis.enabled + +# Unnecessary interpolated string +dotnet_diagnostic.rcs1214.severity = error + +# Expression is always equal to true/false 
+dotnet_diagnostic.rcs1215.severity = warning + +# Unnecessary unsafe context +dotnet_diagnostic.rcs1216.severity = error + +# Convert interpolated string to concatenation +dotnet_diagnostic.rcs1217.severity = silent + +# Simplify code branching +dotnet_diagnostic.rcs1218.severity = error + +# Use pattern matching instead of combination of 'is' operator and cast operator +dotnet_diagnostic.rcs1220.severity = error + +# Use pattern matching instead of combination of 'as' operator and null check +dotnet_diagnostic.rcs1221.severity = error + +# Merge preprocessor directives +dotnet_diagnostic.rcs1222.severity = error + +# Mark publicly visible type with DebuggerDisplay attribute +dotnet_diagnostic.rcs1223.severity = none + +# Make method an extension method +dotnet_diagnostic.rcs1224.severity = error + +# Make class sealed +dotnet_diagnostic.rcs1225.severity = error + +# Add paragraph to documentation comment +dotnet_diagnostic.rcs1226.severity = none + +# Validate arguments correctly +dotnet_diagnostic.rcs1227.severity = error + +# Unused element in a documentation comment +dotnet_diagnostic.rcs1228.severity = silent + +# Use async/await when necessary +dotnet_diagnostic.rcs1229.severity = error + +# Unnecessary explicit use of enumerator +dotnet_diagnostic.rcs1230.severity = error + +# Make parameter ref read-only +dotnet_diagnostic.rcs1231.severity = none + +# Order elements in documentation comment +dotnet_diagnostic.rcs1232.severity = error + +# Use short-circuiting operator +dotnet_diagnostic.rcs1233.severity = error + +# Duplicate enum value +dotnet_diagnostic.rcs1234.severity = error + +# Optimize method call +dotnet_diagnostic.rcs1235.severity = error + +# Use exception filter +dotnet_diagnostic.rcs1236.severity = error + +# Avoid nested ?: operators +dotnet_diagnostic.rcs1238.severity = silent + +# Use 'for' statement instead of 'while' statement +dotnet_diagnostic.rcs1239.severity = error + +# Operator is unnecessary +dotnet_diagnostic.rcs1240.severity = 
suggestion + +# Implement non-generic counterpart +dotnet_diagnostic.rcs1241.severity = silent + +# Do not pass non-read-only struct by read-only reference +dotnet_diagnostic.rcs1242.severity = none + +# Duplicate word in a comment +dotnet_diagnostic.rcs1243.severity = error + +# Simplify 'default' expression +dotnet_diagnostic.rcs1244.severity = silent + +# Use element access +dotnet_diagnostic.rcs1246.severity = error + +# Fix documentation comment tag +dotnet_diagnostic.rcs1247.severity = error + +# Normalize null check +dotnet_diagnostic.rcs1248.severity = none +# Options: roslynator_null_check_style + +# Unnecessary null-forgiving operator +dotnet_diagnostic.rcs1249.severity = none + +# Use implicit/explicit object creation +dotnet_diagnostic.rcs1250.severity = none +# Options: roslynator_object_creation_type_style, roslynator_use_collection_expression, roslynator_use_var_instead_of_implicit_object_creation + +# Remove unnecessary braces from record declaration +dotnet_diagnostic.rcs1251.severity = error + +# Normalize usage of infinite loop +dotnet_diagnostic.rcs1252.severity = none +# Options: roslynator_infinite_loop_style + +# Format documentation comment summary +dotnet_diagnostic.rcs1253.severity = none +# Options: roslynator_doc_comment_summary_style + +# Normalize format of enum flag value +dotnet_diagnostic.rcs1254.severity = none +# Options: roslynator_enum_flag_value_style + +# Simplify argument null check +dotnet_diagnostic.rcs1255.severity = warning + +# Invalid argument null check +dotnet_diagnostic.rcs1256.severity = warning + +# Use enum field explicitly +dotnet_diagnostic.rcs1257.severity = error + +# Unnecessary enum flag +dotnet_diagnostic.rcs1258.severity = error + +# Remove empty syntax +dotnet_diagnostic.rcs1259.severity = error + +# Add/remove trailing comma +dotnet_diagnostic.rcs1260.severity = error +# Options: roslynator_trailing_comma_style + +# Resource can be disposed asynchronously +dotnet_diagnostic.rcs1261.severity = error + +# 
Unnecessary raw string literal +dotnet_diagnostic.rcs1262.severity = error + +# Invalid reference in a documentation comment +dotnet_diagnostic.rcs1263.severity = warning + +# Use 'var' or explicit type +dotnet_diagnostic.rcs1264.severity = none +# Options: roslynator_use_var + +# Remove redundant catch block +dotnet_diagnostic.rcs1265.severity = error + +# Use raw string literal +dotnet_diagnostic.rcs1266.severity = error + +# Use string interpolation instead of 'string.Concat' +dotnet_diagnostic.rcs1267.severity = error + +# Simplify numeric comparison +dotnet_diagnostic.rcs1268.severity = error + +# Use pattern matching +dotnet_diagnostic.rcs9001.severity = silent + +# Use property SyntaxNode.SpanStart +dotnet_diagnostic.rcs9002.severity = error + +# Unnecessary conditional access +dotnet_diagnostic.rcs9003.severity = error + +# Call 'Any' instead of accessing 'Count' +dotnet_diagnostic.rcs9004.severity = none + +# Unnecessary null check +dotnet_diagnostic.rcs9005.severity = error + +# Use element access +dotnet_diagnostic.rcs9006.severity = error + +# Use return value +dotnet_diagnostic.rcs9007.severity = warning + +# Call 'Last' instead of using [] +dotnet_diagnostic.rcs9008.severity = none + +# Unknown language name +dotnet_diagnostic.rcs9009.severity = warning + +# Specify ExportCodeRefactoringProviderAttribute.Name +dotnet_diagnostic.rcs9010.severity = silent + +# Specify ExportCodeFixProviderAttribute.Name +dotnet_diagnostic.rcs9011.severity = silent diff --git a/src/Directory.Build.props b/src/Directory.Build.props index 8bdff634..4a5cd026 100644 --- a/src/Directory.Build.props +++ b/src/Directory.Build.props @@ -23,16 +23,22 @@ true true + true + $(NoWarn);CS1591;RCS1138;CS1668 latest true true true true - CS1591;CS1668 + + + + + diff --git a/src/SharpLearning.AdaBoost.Test/DataSetUtilities.cs b/src/SharpLearning.AdaBoost.Test/DataSetUtilities.cs index 6465ff09..90fb31e6 100644 --- a/src/SharpLearning.AdaBoost.Test/DataSetUtilities.cs +++ 
b/src/SharpLearning.AdaBoost.Test/DataSetUtilities.cs @@ -267,5 +267,4 @@ public static (F64Matrix observations, double[] targets) LoadGlassDataSet() 1.52065;14.36;0;2.02;73.42;0;8.44;1.64;0;7 1.51651;14.38;0;1.94;73.61;0;8.48;1.57;0;7 1.51711;14.23;0;2.08;73.36;0;8.62;1.67;0;7"; - } diff --git a/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs b/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs index c1a177d4..c8ee3a66 100644 --- a/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs +++ b/src/SharpLearning.AdaBoost.Test/Models/ClassificationAdaBoostModelTest.cs @@ -72,7 +72,7 @@ public void ClassificationAdaBoostModel_PredictProbability_Single() Assert.AreEqual(0.038461538461538464, error, 0.0000001); - var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.553917222019051 }, { 1, 0.446082777980949 }, }), new(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary { { 0, 0.564961572849738 }, { 1, 0.435038427150263 }, }), new(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(1, new Dictionary { { 0, 0.417527839140627 }, { 1, 0.582472160859373 }, }), new(1, new Dictionary { { 0, 0.409988559960094 }, { 1, 0.590011440039906 }, }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary { { 0, 0.461264944069783 }, { 1, 0.538735055930217 }, }), new(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary { { 0, 0.549503146925505 }, { 1, 0.450496853074495 }, }), new(0, new Dictionary { { 0, 0.537653803214063 }, { 1, 0.462346196785938 }, }), new(1, new Dictionary { { 0, 0.37650723540928 }, 
{ 1, 0.62349276459072 }, }), new(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary { { 0, 0.524371409810479 }, { 1, 0.475628590189522 }, }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary { { 0, 0.471117379964633 }, { 1, 0.528882620035367 }, }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary { { 0, 0.404976804073458 }, { 1, 0.595023195926542 }, }), new(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.553917222019051 }, { 1, 0.446082777980949 } }), new(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 } }), new(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 } }), new(0, new Dictionary { { 0, 0.564961572849738 }, { 1, 0.435038427150263 } }), new(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 } }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(1, new Dictionary { { 0, 0.417527839140627 }, { 1, 0.582472160859373 } }), new(1, new Dictionary { { 0, 0.409988559960094 }, { 1, 0.590011440039906 } }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary { { 0, 0.461264944069783 }, { 1, 0.538735055930217 } }), new(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 } }), new(0, new Dictionary { { 0, 0.549503146925505 }, { 1, 0.450496853074495 } }), new(0, new 
Dictionary { { 0, 0.537653803214063 }, { 1, 0.462346196785938 } }), new(1, new Dictionary { { 0, 0.37650723540928 }, { 1, 0.62349276459072 } }), new(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 } }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(0, new Dictionary { { 0, 0.524371409810479 }, { 1, 0.475628590189522 } }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary { { 0, 0.471117379964633 }, { 1, 0.528882620035367 } }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary { { 0, 0.404976804073458 }, { 1, 0.595023195926542 } }), new(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 } }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }) }; CollectionAssert.AreEqual(expected, actual); } @@ -90,7 +90,7 @@ public void ClassificationAdaBoostModel_PredictProbability_Multiple() Assert.AreEqual(0.038461538461538464, error, 0.0000001); - var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.553917222019051 }, { 1, 0.446082777980949 }, }), new(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary { { 0, 0.564961572849738 }, { 1, 0.435038427150263 }, }), new(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 }, }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(1, new Dictionary { { 0, 0.417527839140627 }, { 1, 0.582472160859373 }, }), new(1, new Dictionary { { 0, 0.409988559960094 }, { 1, 0.590011440039906 }, }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary { 
{ 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary { { 0, 0.461264944069783 }, { 1, 0.538735055930217 }, }), new(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 }, }), new(0, new Dictionary { { 0, 0.549503146925505 }, { 1, 0.450496853074495 }, }), new(0, new Dictionary { { 0, 0.537653803214063 }, { 1, 0.462346196785938 }, }), new(1, new Dictionary { { 0, 0.37650723540928 }, { 1, 0.62349276459072 }, }), new(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary { { 0, 0.524371409810479 }, { 1, 0.475628590189522 }, }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary { { 0, 0.471117379964633 }, { 1, 0.528882620035367 }, }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 }, }), new(1, new Dictionary { { 0, 0.404976804073458 }, { 1, 0.595023195926542 }, }), new(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 }, }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 }, }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.553917222019051 }, { 1, 0.446082777980949 } }), new(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 } }), new(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 } }), new(0, new Dictionary { { 0, 0.564961572849738 }, { 1, 0.435038427150263 } }), new(1, new Dictionary { { 0, 0.455270122123639 }, { 1, 0.544729877876361 } }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(1, new Dictionary { { 0, 0.417527839140627 }, { 1, 0.582472160859373 } }), new(1, new Dictionary { { 0, 0.409988559960094 }, { 1, 
0.590011440039906 } }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary { { 0, 0.461264944069783 }, { 1, 0.538735055930217 } }), new(0, new Dictionary { { 0, 0.590671208378385 }, { 1, 0.409328791621616 } }), new(0, new Dictionary { { 0, 0.549503146925505 }, { 1, 0.450496853074495 } }), new(0, new Dictionary { { 0, 0.537653803214063 }, { 1, 0.462346196785938 } }), new(1, new Dictionary { { 0, 0.37650723540928 }, { 1, 0.62349276459072 } }), new(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 } }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(0, new Dictionary { { 0, 0.524371409810479 }, { 1, 0.475628590189522 } }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary { { 0, 0.471117379964633 }, { 1, 0.528882620035367 } }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }), new(1, new Dictionary { { 0, 0.436954866525023 }, { 1, 0.563045133474978 } }), new(1, new Dictionary { { 0, 0.404976804073458 }, { 1, 0.595023195926542 } }), new(0, new Dictionary { { 0, 0.573579890413618 }, { 1, 0.426420109586382 } }), new(0, new Dictionary { { 0, 0.549970403132686 }, { 1, 0.450029596867314 } }), new(0, new Dictionary { { 0, 0.630894242807786 }, { 1, 0.369105757192214 } }) }; CollectionAssert.AreEqual(expected, actual); } @@ -100,14 +100,14 @@ public void ClassificationAdaBoostModel_GetVariableImportance() var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, - { "PreviousExperience_month", 1 } }; + { "PreviousExperience_month", 1 }, }; var learner = new ClassificationAdaBoostLearner(10, 1, 3); var sut = learner.Learn(observations, targets); var actual = sut.GetVariableImportance(featureNameToIndex); var expected = new Dictionary { { 
"PreviousExperience_month", 100.0 }, - { "AptitudeTestScore", 24.0268096428771 } }; + { "AptitudeTestScore", 24.0268096428771 }, }; Assert.AreEqual(expected.Count, actual.Count); var zip = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a }); diff --git a/src/SharpLearning.AdaBoost.Test/Models/RegressionAdaBoostModelTest.cs b/src/SharpLearning.AdaBoost.Test/Models/RegressionAdaBoostModelTest.cs index 72189d82..bb10ade2 100644 --- a/src/SharpLearning.AdaBoost.Test/Models/RegressionAdaBoostModelTest.cs +++ b/src/SharpLearning.AdaBoost.Test/Models/RegressionAdaBoostModelTest.cs @@ -56,14 +56,14 @@ public void RegressionAdaBoostModel_GetVariableImportance() var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, - { "PreviousExperience_month", 1 } }; + { "PreviousExperience_month", 1 }, }; var learner = new RegressionAdaBoostLearner(10); var sut = learner.Learn(observations, targets); var actual = sut.GetVariableImportance(featureNameToIndex); var expected = new Dictionary { { "PreviousExperience_month", 100.0 }, - { "AptitudeTestScore", 33.8004886838701 } }; + { "AptitudeTestScore", 33.8004886838701 }, }; Assert.AreEqual(expected.Count, actual.Count); var zip = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a }); diff --git a/src/SharpLearning.AdaBoost/Learners/AdaBoostRegressionLoss.cs b/src/SharpLearning.AdaBoost/Learners/AdaBoostRegressionLoss.cs index bb1acd8d..9f1a47e9 100644 --- a/src/SharpLearning.AdaBoost/Learners/AdaBoostRegressionLoss.cs +++ b/src/SharpLearning.AdaBoost/Learners/AdaBoostRegressionLoss.cs @@ -18,5 +18,5 @@ public enum AdaBoostRegressionLoss /// /// Exponential loss /// - Exponential + Exponential, } diff --git a/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs b/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs index 98948750..320fe43e 100644 --- 
a/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs +++ b/src/SharpLearning.AdaBoost/Learners/ClassificationAdaBoostLearner.cs @@ -49,8 +49,8 @@ public sealed class ClassificationAdaBoostLearner /// /// Number of iterations (models) to boost /// How much each boost iteration should add (between 1.0 and 0.0) - /// The maximum depth of the tree models. - /// for 2 class problem 1 is usually enough. For more classes or larger problems between 3 to 8 is recommended. + /// The maximum depth of the tree models. + /// for 2 class problem 1 is usually enough. For more classes or larger problems between 3 to 8 is recommended. /// 0 will set the depth equal to the number of classes in the problem /// minimum node split size in the trees 1 is default /// The minimum improvement in information gain before a split is made @@ -139,19 +139,27 @@ public ClassificationAdaBoostModel Learn(F64Matrix observations, double[] target for (var i = 0; i < m_iterations; i++) { if (!Boost(observations, targets, indices, i)) + { break; + } var ensembleError = ErrorEstimate(observations, indices); if (ensembleError == 0.0) + { break; + } if (m_modelErrors[i] == 0.0) + { break; + } var weightSum = m_sampleWeights.Sum(indices); if (weightSum <= 0.0) + { break; + } if (i == m_iterations - 1) { @@ -159,7 +167,7 @@ public ClassificationAdaBoostModel Learn(F64Matrix observations, double[] target for (var j = 0; j < indices.Length; j++) { var index = indices[j]; - m_sampleWeights[index] = m_sampleWeights[index] / weightSum; + m_sampleWeights[index] /= weightSum; } } } diff --git a/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs b/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs index 99cbcda0..e19ced42 100644 --- a/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs +++ b/src/SharpLearning.AdaBoost/Learners/RegressionAdaBoostLearner.cs @@ -14,7 +14,7 @@ namespace SharpLearning.AdaBoost.Learners; /// -/// Regression AdaBoost learner using 
the R2 algorithm +/// Regression AdaBoost learner using the R2 algorithm /// using weighted sampling to target the observations with largest error and /// weighted median to ensemble the models. /// @@ -44,13 +44,13 @@ public sealed class RegressionAdaBoostLearner : IIndexedLearner, ILearne readonly WeightedRandomSampler m_sampler; /// - /// Regression AdaBoost learner using the R2 algorithm + /// Regression AdaBoost learner using the R2 algorithm /// using weighted sampling to target the observations with largest error and /// weighted median to ensemble the models. /// /// Number of iterations (models) to boost /// How much each boost iteration should add (between 1.0 and 0.0) - /// The maximum depth of the tree models. + /// The maximum depth of the tree models. /// 0 will set the depth to default 3 /// Type of loss used when boosting weights. Linear is default /// minimum node split size in the trees 1 is default @@ -137,19 +137,27 @@ public RegressionAdaBoostModel Learn(F64Matrix observations, double[] targets, for (var i = 0; i < m_iterations; i++) { if (!Boost(observations, targets, indices, i)) + { break; + } var ensembleError = ErrorEstimate(observations, indices); if (ensembleError == 0.0) + { break; + } if (m_modelErrors[i] == 0.0) + { break; + } var weightSum = m_sampleWeights.Sum(indices); if (weightSum <= 0.0) + { break; + } if (i == m_iterations - 1) { @@ -157,7 +165,7 @@ public RegressionAdaBoostModel Learn(F64Matrix observations, double[] targets, for (var j = 0; j < indices.Length; j++) { var index = indices[j]; - m_sampleWeights[index] = m_sampleWeights[index] / weightSum; + m_sampleWeights[index] /= weightSum; } } } @@ -195,7 +203,6 @@ bool Boost(F64Matrix observations, double[] targets, int[] indices, int iteratio var model = m_modelLearner.Learn(observations, targets, m_sampleIndices); // weighted sampling is used instead of weights in training - var predictions = model.Predict(observations, indices); for (var i = 0; i < predictions.Length; 
i++) @@ -208,7 +215,6 @@ bool Boost(F64Matrix observations, double[] targets, int[] indices, int iteratio for (var i = 0; i < m_workErrors.Length; i++) { - var error = m_workErrors[i]; if (maxError != 0.0) diff --git a/src/SharpLearning.AdaBoost/WeightedRandomSampler.cs b/src/SharpLearning.AdaBoost/WeightedRandomSampler.cs index 8351cf46..656fb688 100644 --- a/src/SharpLearning.AdaBoost/WeightedRandomSampler.cs +++ b/src/SharpLearning.AdaBoost/WeightedRandomSampler.cs @@ -47,7 +47,7 @@ public void Sample(int[] indices, double[] weights, int[] outIndices) var totalWeight = weights.Sum(indices); var i = 0; - var index = indices.First(); + var index = indices[0]; var weight = weights[index]; var samples = outIndices.Length; @@ -55,18 +55,18 @@ public void Sample(int[] indices, double[] weights, int[] outIndices) while (samples > 0) { - var x = totalWeight * (1.0 - Math.Pow(m_random.NextDouble(), (1.0 / samples))); + var x = totalWeight * (1.0 - Math.Pow(m_random.NextDouble(), 1.0 / samples)); totalWeight -= x; while (x > weight) { x -= weight; - i += 1; + i++; index = indices[i]; weight = weights[index]; } weight -= x; outIndices[current++] = index; - samples -= 1; + samples--; } } } diff --git a/src/SharpLearning.Common.Interfaces/IIndexedLearner.cs b/src/SharpLearning.Common.Interfaces/IIndexedLearner.cs index 57d9831e..d618de62 100644 --- a/src/SharpLearning.Common.Interfaces/IIndexedLearner.cs +++ b/src/SharpLearning.Common.Interfaces/IIndexedLearner.cs @@ -3,7 +3,7 @@ namespace SharpLearning.Common.Interfaces; /// -/// Interface for indexed learner. +/// Interface for indexed learner. /// Only the observations from the provided indices in the index array will be used for training /// /// The prediction type of the resulting model. 
diff --git a/src/SharpLearning.Common.Interfaces/ILearner.cs b/src/SharpLearning.Common.Interfaces/ILearner.cs index f9465bc9..d0edc9df 100644 --- a/src/SharpLearning.Common.Interfaces/ILearner.cs +++ b/src/SharpLearning.Common.Interfaces/ILearner.cs @@ -3,16 +3,10 @@ namespace SharpLearning.Common.Interfaces; /// -/// General interface for learner. +/// General interface for learner. /// /// public interface ILearner { - /// - /// - /// - /// - /// - /// IPredictorModel Learn(F64Matrix observations, double[] targets); } diff --git a/src/SharpLearning.Common.Interfaces/IPredictor.cs b/src/SharpLearning.Common.Interfaces/IPredictor.cs index 0574ba64..0bdad953 100644 --- a/src/SharpLearning.Common.Interfaces/IPredictor.cs +++ b/src/SharpLearning.Common.Interfaces/IPredictor.cs @@ -4,7 +4,7 @@ namespace SharpLearning.Common.Interfaces; /// -/// General interface for predictor. +/// General interface for predictor. /// /// The prediction type of the resulting model. public interface IPredictor diff --git a/src/SharpLearning.Common.Interfaces/IPredictorModel.cs b/src/SharpLearning.Common.Interfaces/IPredictorModel.cs index 7a3c0712..7da2a517 100644 --- a/src/SharpLearning.Common.Interfaces/IPredictorModel.cs +++ b/src/SharpLearning.Common.Interfaces/IPredictorModel.cs @@ -4,6 +4,4 @@ /// Interface for predictor models. Supports prediction and variable importance. 
/// /// -public interface IPredictorModel : IPredictor, IModelVariableImportance -{ -} +public interface IPredictorModel : IPredictor, IModelVariableImportance; diff --git a/src/SharpLearning.Containers.Test/Arithmetic/MatrixAddTest.cs b/src/SharpLearning.Containers.Test/Arithmetic/MatrixAddTest.cs index 1d3cf568..3725bac4 100644 --- a/src/SharpLearning.Containers.Test/Arithmetic/MatrixAddTest.cs +++ b/src/SharpLearning.Containers.Test/Arithmetic/MatrixAddTest.cs @@ -65,5 +65,4 @@ public void MatrixAdd_Add_Vectors_Different_Lengths() v1.Add(v2); } - } diff --git a/src/SharpLearning.Containers.Test/ArrayExtensionsTest.cs b/src/SharpLearning.Containers.Test/ArrayExtensionsTest.cs index 09182d0f..a702a0ec 100644 --- a/src/SharpLearning.Containers.Test/ArrayExtensionsTest.cs +++ b/src/SharpLearning.Containers.Test/ArrayExtensionsTest.cs @@ -396,7 +396,6 @@ public void ArrayExtensions_StratifiedIndexSampling_SampleSize_Too_large() values.StratifiedIndexSampling(sampleSize, new Random(42)); } - [TestMethod] [ExpectedException(typeof(ArgumentException))] public void ArrayExtensions_StratifiedIndexSampling_SampleSize_Too_Small() diff --git a/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs b/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs index b6a366da..44a5f405 100644 --- a/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs +++ b/src/SharpLearning.Containers.Test/Matrices/F64MatrixTest.cs @@ -27,7 +27,6 @@ public void F64Matrix_At_Indexer() Assert.AreEqual(300, sut[2, 2]); } - [TestMethod] public void F64Matrix_At_Set() { @@ -50,7 +49,6 @@ public void F64Matrix_At_Set_Indexer() Assert.AreEqual(item, value); } - [TestMethod] public void F64Matrix_Row() { @@ -156,7 +154,7 @@ static F64Matrix GetExpectedColSubMatrix() { var features = new double[6] { 1, 3, 10, 30, - 100, 300}; + 100, 300,}; return new F64Matrix(features, 3, 2); } @@ -164,7 +162,7 @@ static F64Matrix GetExpectedColSubMatrix() static F64Matrix GetExpectedRowSubMatrix() { var 
features = new double[6] { 1, 2, 3, - 100, 200, 300}; + 100, 200, 300,}; return new F64Matrix(features, 2, 3); } @@ -173,7 +171,7 @@ static F64Matrix CreateFeatures() { var features = new double[9] { 1, 2, 3, 10, 20, 30, - 100, 200, 300}; + 100, 200, 300,}; return new F64Matrix(features, 3, 3); } diff --git a/src/SharpLearning.Containers.Test/Matrices/StringMatrixTest.cs b/src/SharpLearning.Containers.Test/Matrices/StringMatrixTest.cs index 1567a72c..9b67a595 100644 --- a/src/SharpLearning.Containers.Test/Matrices/StringMatrixTest.cs +++ b/src/SharpLearning.Containers.Test/Matrices/StringMatrixTest.cs @@ -26,7 +26,6 @@ public void StringMatrix_At_Indexer() Assert.AreEqual("300", sut[2, 2]); } - [TestMethod] public void StringMatrix_At_Set() { @@ -141,7 +140,7 @@ static StringMatrix GetExpectedColSubMatrix() { var features = new string[] { "1", "3", "10", "30", - "100", "300"}; + "100", "300",}; return new StringMatrix(features, 3, 2); } @@ -149,7 +148,7 @@ static StringMatrix GetExpectedColSubMatrix() static StringMatrix GetExpectedRowSubMatrix() { var features = new string[] { "1", "2", "3", - "100", "200", "300"}; + "100", "200", "300",}; return new StringMatrix(features, 2, 3); } @@ -158,7 +157,7 @@ static StringMatrix CreateFeatures() { var features = new string[] { "1", "2", "3", "10", "20", "30", - "100", "200", "300"}; + "100", "200", "300",}; return new StringMatrix(features, 3, 3); } diff --git a/src/SharpLearning.Containers.Test/ObservationTargetSetTest.cs b/src/SharpLearning.Containers.Test/ObservationTargetSetTest.cs index 8e308f65..e7400ce3 100644 --- a/src/SharpLearning.Containers.Test/ObservationTargetSetTest.cs +++ b/src/SharpLearning.Containers.Test/ObservationTargetSetTest.cs @@ -41,5 +41,4 @@ public void ObservationTargetSet_Targets_Null() { new ObservationTargetSet(new F64Matrix([1, 2, 3, 4], 2, 2), null); } - } diff --git a/src/SharpLearning.Containers.Test/ProbabilityPredictionTest.cs 
b/src/SharpLearning.Containers.Test/ProbabilityPredictionTest.cs index 98e54ba7..acac98a2 100644 --- a/src/SharpLearning.Containers.Test/ProbabilityPredictionTest.cs +++ b/src/SharpLearning.Containers.Test/ProbabilityPredictionTest.cs @@ -11,7 +11,6 @@ public class ProbabilityPredictionTest readonly ProbabilityPrediction m_notEqual1 = new(0.0, new Dictionary { { 1.0, .3 }, { 0.0, 0.8 } }); readonly ProbabilityPrediction m_notEqual2 = new(1.0, new Dictionary { { 1.0, .78 }, { 0.0, 0.22 } }); - [TestMethod] public void ProbabilityPrediction_Prediction_Equals() { diff --git a/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs b/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs index 7f0a0cd1..13db9d50 100644 --- a/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs +++ b/src/SharpLearning.Containers.Test/Views/F64MatrixViewTest.cs @@ -84,7 +84,7 @@ static F64Matrix Matrix() { var features = new double[9] { 1, 2, 3, 10, 20, 30, - 100, 200, 300}; + 100, 200, 300,}; return new F64Matrix(features, 3, 3); } @@ -97,7 +97,6 @@ static unsafe void AssertColumnView(double[] column, F64MatrixColumnView columnV } } - unsafe void AssertMatrixView(IMatrix matrix, F64MatrixView view) { for (var i = 0; i < matrix.RowCount; i++) diff --git a/src/SharpLearning.Containers/Arithmetic/MatrixAdd.cs b/src/SharpLearning.Containers/Arithmetic/MatrixAdd.cs index 6e1f2455..9249f39c 100644 --- a/src/SharpLearning.Containers/Arithmetic/MatrixAdd.cs +++ b/src/SharpLearning.Containers/Arithmetic/MatrixAdd.cs @@ -3,9 +3,6 @@ namespace SharpLearning.Containers.Arithmetic; -/// -/// -/// public static class MatrixAdd { /// diff --git a/src/SharpLearning.Containers/Arithmetic/MatrixMultiplication.cs b/src/SharpLearning.Containers/Arithmetic/MatrixMultiplication.cs index 662b1a7c..0deb9021 100644 --- a/src/SharpLearning.Containers/Arithmetic/MatrixMultiplication.cs +++ b/src/SharpLearning.Containers/Arithmetic/MatrixMultiplication.cs @@ -40,7 +40,7 @@ public static double[] 
MultiplyVectorF64(F64Matrix a, double[] v) } /// - /// Multiply vector v with matrix a. + /// Multiply vector v with matrix a. /// Copies output to provided array. /// /// @@ -56,7 +56,6 @@ public static void MultiplyVectorF64(F64Matrix a, double[] v, double[] output) if (cols != v.Length) { throw new ArgumentException("matrix cols: " + cols + " differs from vector length: " + v.Length); } - for (var i = 0; i < rows; ++i) { var sum = 0.0; @@ -96,7 +95,6 @@ public static double[] Multiply(this double[] v, double s) return MultiplyScalarF64(v, s); } - /// /// Multiply vector v with matrix a /// @@ -167,7 +165,6 @@ public static void MultiplyF64(F64Matrix a, F64Matrix b, F64Matrix output) if (aCols != bRows) { throw new ArgumentException("matrix a cols: " + aCols + " differs from matrix b rows: " + bRows); } - if (output.RowCount != aRows) { throw new ArgumentException("output matrix rows: " + output.RowCount @@ -192,7 +189,6 @@ public static void MultiplyF64(F64Matrix a, F64Matrix b, F64Matrix output) }); } - /// /// Multiply matrix a with matrix b /// diff --git a/src/SharpLearning.Containers/Arithmetic/MatrixSubtraction.cs b/src/SharpLearning.Containers/Arithmetic/MatrixSubtraction.cs index add29f39..09610ed4 100644 --- a/src/SharpLearning.Containers/Arithmetic/MatrixSubtraction.cs +++ b/src/SharpLearning.Containers/Arithmetic/MatrixSubtraction.cs @@ -3,9 +3,6 @@ namespace SharpLearning.Containers.Arithmetic; -/// -/// -/// public static class MatrixSubtraction { /// @@ -75,7 +72,6 @@ public static void SubtractF64(F64Matrix m1, F64Matrix m2, F64Matrix output) " differs from matrix output rows: " + outputRows); } - for (var i = 0; i < m1Rows; i++) { for (var j = 0; j < m1Cols; j++) diff --git a/src/SharpLearning.Containers/Arithmetic/MatrixTranspose.cs b/src/SharpLearning.Containers/Arithmetic/MatrixTranspose.cs index 5eb7ac34..2b13d467 100644 --- a/src/SharpLearning.Containers/Arithmetic/MatrixTranspose.cs +++ 
b/src/SharpLearning.Containers/Arithmetic/MatrixTranspose.cs @@ -21,7 +21,7 @@ public static F64Matrix TransposeF64(F64Matrix matrix) } /// - /// Transposes matrix. + /// Transposes matrix. /// Output is saved in the provided matrix transposed. /// /// diff --git a/src/SharpLearning.Containers/CertaintyPrediction.cs b/src/SharpLearning.Containers/CertaintyPrediction.cs index 50d1eed3..38bc0a6e 100644 --- a/src/SharpLearning.Containers/CertaintyPrediction.cs +++ b/src/SharpLearning.Containers/CertaintyPrediction.cs @@ -8,19 +8,10 @@ namespace SharpLearning.Containers; [Serializable] public struct CertaintyPrediction { - /// - /// - /// public readonly double Prediction; - /// - /// - /// public readonly double Variance; - /// - /// - /// /// /// public CertaintyPrediction(double prediction, double variance) @@ -29,34 +20,21 @@ public CertaintyPrediction(double prediction, double variance) Prediction = prediction; } - /// - /// - /// /// /// public bool Equals(CertaintyPrediction other) { if (!Equal(Prediction, other.Prediction)) { return false; } - if (!Equal(Variance, other.Variance)) { return false; } - - return true; + return Equal(Variance, other.Variance); } - /// - /// - /// /// /// public override bool Equals(object obj) { - if (obj is CertaintyPrediction) - return Equals((CertaintyPrediction)obj); - return false; + return obj is CertaintyPrediction prediction && Equals(prediction); } - /// - /// - /// /// /// /// @@ -65,9 +43,6 @@ public override bool Equals(object obj) return p1.Equals(p2); } - /// - /// - /// /// /// /// @@ -76,25 +51,17 @@ public override bool Equals(object obj) return !p1.Equals(p2); } - /// - /// - /// /// public override int GetHashCode() { return Prediction.GetHashCode() ^ Variance.GetHashCode(); } - const double m_tolerence = 0.00001; + const double Tolerence = 0.00001; static bool Equal(double a, double b) { - var diff = Math.Abs(a * m_tolerence); - if (Math.Abs(a - b) <= diff) - { - return true; - } - - return false; + var diff = 
Math.Abs(a * Tolerence); + return Math.Abs(a - b) <= diff; } } diff --git a/src/SharpLearning.Containers/Checks.cs b/src/SharpLearning.Containers/Checks.cs index a7732d82..5b8b6a14 100644 --- a/src/SharpLearning.Containers/Checks.cs +++ b/src/SharpLearning.Containers/Checks.cs @@ -83,7 +83,7 @@ public static void VerifyObservationsAndTargetsDimensions(int observationRowCoun { if (observationRowCount != targetLength) { - throw new ArgumentException($"Observations and targets mismatch." + + throw new ArgumentException("Observations and targets mismatch." + $"Observations row count: {observationRowCount}, targets row count: {targetLength}"); } } @@ -121,14 +121,14 @@ public static void VerifyIndices(int[] indices, int observationRowCount, int tar var min = indices.Min(); if (min < 0) { - throw new ArgumentException($"Indices contains negative " + + throw new ArgumentException("Indices contains negative " + $"values: {string.Join(",", indices.Where(v => v < 0))}"); } var max = indices.Max(); if (max >= observationRowCount || max >= targetLength) { - throw new ArgumentException($"Indices contains elements exceeding the row count of observations and targets. " + + throw new ArgumentException("Indices contains elements exceeding the row count of observations and targets. 
" + $"Indices Max: {max}, observations row count: {observationRowCount}, target length: {targetLength}"); } } diff --git a/src/SharpLearning.Containers/Extensions/ArrayExtensions.cs b/src/SharpLearning.Containers/Extensions/ArrayExtensions.cs index c7beeefd..72187d7f 100644 --- a/src/SharpLearning.Containers/Extensions/ArrayExtensions.cs +++ b/src/SharpLearning.Containers/Extensions/ArrayExtensions.cs @@ -6,14 +6,8 @@ namespace SharpLearning.Containers.Extensions; -/// -/// -/// public static class ArrayExtensions { - /// - /// - /// public static readonly Converter DefaultF64Converter = FloatingPointConversion.ToF64; /// @@ -70,7 +64,6 @@ public static void Map(this T[] array, Func a) } } - /// /// Converts Nan to 0.0, NegativeInfinity to double.MinValue and PositiveInfinity to double.MaxValue /// @@ -177,7 +170,7 @@ public static void SortWith(this TKey[] keys, TValues[] values) } /// - /// Sorts the keys and values based on the keys within the provided interval + /// Sorts the keys and values based on the keys within the provided interval /// /// /// @@ -332,19 +325,17 @@ public static double Median(this double[] values) return array[array.Length / 2]; } - /// - /// - /// - /// - /// - /// public static double ScoreAtPercentile(this double[] values, double percentile) { if (percentile == 1.0) + { return values.Max(); + } if (percentile == 0.0) + { return values.Min(); + } var array = new double[values.Length]; Array.Copy(values, array, values.Length); @@ -388,7 +379,7 @@ public static F64Matrix ToF64Matrix(this double[][] m) public static F64Matrix ToF64Matrix(this List m) { var rows = m.Count; - var cols = m.First().Length; + var cols = m[0].Length; var matrix = new F64Matrix(rows, cols); for (var i = 0; i < rows; i++) @@ -455,7 +446,7 @@ public static void Shuffle(this IList list, Random random) /// /// Takes a stratified sample of size sampleSize with distributions equal to the input data. 
/// http://en.wikipedia.org/wiki/Stratified_sampling - /// Returns a set of indices corresponding to the samples chosen. + /// Returns a set of indices corresponding to the samples chosen. /// /// /// @@ -489,7 +480,7 @@ public static int[] StratifiedIndexSampling(this T[] data, int sampleSize, Ra var currentSampleCount = requiredSamples.ToDictionary(k => k.Key, k => 0); // might be slightly different than the specified depending on data distribution - var actualSampleSize = requiredSamples.Select(s => s.Value).Sum(); + var actualSampleSize = requiredSamples.Sum(s => s.Value); // if actual sample size is different from specified add/subtract duff from largest class if (actualSampleSize != sampleSize) @@ -518,19 +509,16 @@ public static int[] StratifiedIndexSampling(this T[] data, int sampleSize, Ra } } - if (requiredSamples.Select(s => s.Value).Sum() != sampleSize) - { - throw new ArgumentException("Actual sample size: " + actualSampleSize + - " is different than specified sample size: " + sampleSize); - } - - return sampleIndices; + return requiredSamples.Sum(s => s.Value) != sampleSize + ? throw new ArgumentException("Actual sample size: " + actualSampleSize + + " is different than specified sample size: " + sampleSize) + : sampleIndices; } /// /// Takes a stratified sample of size sampleSize with distributions equal to the input data. /// http://en.wikipedia.org/wiki/Stratified_sampling - /// Returns a set of indices corresponding to the samples chosen. + /// Returns a set of indices corresponding to the samples chosen. 
/// Only samples within the indices provided in dataIndices /// /// @@ -567,7 +555,7 @@ public static int[] StratifiedIndexSampling(this T[] data, int sampleSize, in var currentSampleCount = requiredSamples.ToDictionary(k => k.Key, k => 0); // might be slightly different than the specified depending on data distribution - var actualSampleSize = requiredSamples.Select(s => s.Value).Sum(); + var actualSampleSize = requiredSamples.Sum(s => s.Value); // if actual sample size is different from specified add/subtract difference from largest class if (actualSampleSize != sampleSize) @@ -600,12 +588,9 @@ public static int[] StratifiedIndexSampling(this T[] data, int sampleSize, in } } - if (requiredSamples.Select(s => s.Value).Sum() != sampleSize) - { - throw new ArgumentException("Actual sample size: " + actualSampleSize + - " is different than specified sample size: " + sampleSize); - } - - return sampleIndices; + return requiredSamples.Sum(s => s.Value) != sampleSize + ? throw new ArgumentException("Actual sample size: " + actualSampleSize + + " is different than specified sample size: " + sampleSize) + : sampleIndices; } } diff --git a/src/SharpLearning.Containers/FloatingPointConversion.cs b/src/SharpLearning.Containers/FloatingPointConversion.cs index ed73e8b9..15286ca9 100644 --- a/src/SharpLearning.Containers/FloatingPointConversion.cs +++ b/src/SharpLearning.Containers/FloatingPointConversion.cs @@ -3,20 +3,11 @@ namespace SharpLearning.Containers; -/// -/// -/// public static class FloatingPointConversion { - /// - /// - /// public const string DefaultFormat = "R"; - /// - /// - /// - public static readonly NumberFormatInfo nfi = new(); + public static readonly NumberFormatInfo Nfi = new(); /// /// Default NumberStyle is Any. @@ -25,10 +16,10 @@ public static class FloatingPointConversion /// /// Default format for outputting double values to string. 
- /// + /// public static string ToString(double value) { - return value.ToString(DefaultFormat, nfi); + return value.ToString(DefaultFormat, Nfi); } /// @@ -54,13 +45,8 @@ public static double ToF64(string value, Converter converter) static double ParseAnyNumberStyle(string value) { - if (double.TryParse(value, NumberStyle, nfi, out var result)) - { - return result; - } - else - { - throw new ArgumentException($"Unable to parse \"{value}\" to double"); - } + return double.TryParse(value, NumberStyle, Nfi, out var result) + ? result + : throw new ArgumentException($"Unable to parse \"{value}\" to double"); } } diff --git a/src/SharpLearning.Containers/Matrices/F64Matrix.cs b/src/SharpLearning.Containers/Matrices/F64Matrix.cs index 5eff738e..5115200d 100644 --- a/src/SharpLearning.Containers/Matrices/F64Matrix.cs +++ b/src/SharpLearning.Containers/Matrices/F64Matrix.cs @@ -24,14 +24,14 @@ public F64Matrix(int rows, int cols) } /// - /// Creates a matrix from the provided values with the specified rows and cols + /// Creates a matrix from the provided values with the specified rows and cols /// /// /// /// public F64Matrix(double[] values, int rows, int cols) { - if (values == null) { throw new ArgumentNullException("values"); } + if (values == null) { throw new ArgumentNullException(nameof(values)); } if (values.Length != rows * cols) { throw new ArgumentException("feature array length does not match row * cols"); } if (rows < 1) { throw new ArgumentException("matrix must have at least 1 row"); } if (cols < 1) { throw new ArgumentException("matrix must have at least 1 col"); } @@ -70,8 +70,8 @@ public void At(int row, int col, double item) /// /// Access the matrix like a 2D array /// - /// /// + /// /// public double this[int row, int col] { @@ -92,7 +92,7 @@ public double[] Row(int index) } /// - /// gets the specified row. + /// gets the specified row. /// The values are copied to the provided row array. 
/// /// @@ -162,7 +162,7 @@ public IMatrix Rows(params int[] indices) } /// - /// Gets the specified rows as a matrix. + /// Gets the specified rows as a matrix. /// Output is copied to the provided matrix /// /// @@ -207,7 +207,7 @@ public IMatrix Columns(params int[] indices) } /// - /// Gets the specified rows as a matrix. + /// Gets the specified rows as a matrix. /// Output is copied to the provided matrix /// /// @@ -241,13 +241,13 @@ public double[] Data() /// Gets the number of columns /// /// - public int ColumnCount { get; private set; } + public int ColumnCount { get; } /// /// Gets the number of rows /// /// - public int RowCount { get; private set; } + public int RowCount { get; } /// /// Gets a pinned pointer to the F64Matrix @@ -258,39 +258,18 @@ public F64MatrixPinnedPtr GetPinnedPointer() return new F64MatrixPinnedPtr(this); } - /// - /// - /// - /// - /// public bool Equals(F64Matrix other) { if (RowCount != other.RowCount) { return false; } if (ColumnCount != other.ColumnCount) { return false; } - if (!Data().SequenceEqual(other.Data())) { return false; } - - return true; + return Data().SequenceEqual(other.Data()); } - /// - /// - /// - /// - /// public override bool Equals(object obj) { - if (obj is F64Matrix other && Equals(other)) - { - return true; - } - - return false; + return obj is F64Matrix other && Equals(other); } - /// - /// - /// - /// public override int GetHashCode() { unchecked // Overflow is fine, just wrap diff --git a/src/SharpLearning.Containers/Matrices/F64MatrixExtensions.cs b/src/SharpLearning.Containers/Matrices/F64MatrixExtensions.cs index f3dc624f..d6881d3e 100644 --- a/src/SharpLearning.Containers/Matrices/F64MatrixExtensions.cs +++ b/src/SharpLearning.Containers/Matrices/F64MatrixExtensions.cs @@ -32,7 +32,6 @@ public static void Map(this F64Matrix matrix, Func func) matrix.Data().Map(func); } - /// /// Iterates over all elements in the matrix and applies the function to the elements. 
/// The values are updated directly in the Matrix. @@ -55,9 +54,8 @@ public static StringMatrix ToStringMatrix(this F64Matrix matrix) return new StringMatrix(stringFeatures, matrix.RowCount, matrix.ColumnCount); } - /// - /// Combines vector1 and vector2 column-wise. Vector2 is added to the end of vector1 + /// Combines vector1 and vector2 column-wise. Vector2 is added to the end of vector1 /// /// /// @@ -87,7 +85,7 @@ public static F64Matrix CombineCols(this double[] v1, double[] v2) } /// - /// Combines matrix and vector column-wise. Vector is added to the end of the matrix + /// Combines matrix and vector column-wise. Vector is added to the end of the matrix /// /// /// @@ -115,18 +113,17 @@ public static F64Matrix CombineCols(this F64Matrix m, double[] v) combineIndex += m.ColumnCount; Array.Copy(v, otherIndex, features, combineIndex, 1); - combineIndex += 1; + combineIndex++; } - return new F64Matrix(features, rows, cols); } /// - /// Combines vector and matrix column-wise. Matrix is added to the left of the vector + /// Combines vector and matrix column-wise. Matrix is added to the left of the vector /// - /// /// + /// /// public static F64Matrix CombineCols(this double[] v, F64Matrix m) { @@ -145,18 +142,16 @@ public static F64Matrix CombineCols(this double[] v, F64Matrix m) for (var i = 0; i < rows; i++) { Array.Copy(v, i, features, combineIndex, 1); - combineIndex += 1; + combineIndex++; var matrixIndex = i * m.ColumnCount; Array.Copy(matrixArray, matrixIndex, features, combineIndex, m.ColumnCount); combineIndex += m.ColumnCount; - } return new F64Matrix(features, rows, cols); } - /// /// Combines matrix1 and matrix2 column-wise. Matrix2 is added to the end of matrix1 /// @@ -191,12 +186,11 @@ public static F64Matrix CombineCols(this F64Matrix m1, F64Matrix m2) combineIndex += m2.ColumnCount; } - return new F64Matrix(features, rows, columns); } /// - /// Combines vector1 and vector2 row wise. 
Vector2 is added to the bottom of vector1 + /// Combines vector1 and vector2 row wise. Vector2 is added to the bottom of vector1 /// /// /// @@ -219,9 +213,8 @@ public static F64Matrix CombineRows(this double[] v1, double[] v2) return new F64Matrix(features, rows, cols); } - /// - /// Combines matrix and vector row wise. Vector is added to the bottom of the matrix + /// Combines matrix and vector row wise. Vector is added to the bottom of the matrix /// /// /// @@ -248,8 +241,8 @@ public static F64Matrix CombineRows(this F64Matrix m, double[] v) /// /// Combines vecor and matrix row wise. Matrix is added to the bottom of the vector /// - /// /// + /// /// public static F64Matrix CombineRows(this double[] v, F64Matrix m) { @@ -270,7 +263,6 @@ public static F64Matrix CombineRows(this double[] v, F64Matrix m) return new F64Matrix(features, rows, cols); } - /// /// Combines matrix1 and matrix2 row wise. Matrix2 is added to the bottom of matrix1 /// diff --git a/src/SharpLearning.Containers/Matrices/IMatrix.cs b/src/SharpLearning.Containers/Matrices/IMatrix.cs index 505c5945..59885323 100644 --- a/src/SharpLearning.Containers/Matrices/IMatrix.cs +++ b/src/SharpLearning.Containers/Matrices/IMatrix.cs @@ -38,7 +38,7 @@ public interface IMatrix T[] Column(int index); /// - /// Gets row at index + /// Gets row at index /// /// /// @@ -59,7 +59,7 @@ public interface IMatrix IMatrix Rows(params int[] indices); /// - /// Gets the specified rows as a matrix. + /// Gets the specified rows as a matrix. /// Output is copied to the provided matrix /// /// @@ -75,7 +75,7 @@ public interface IMatrix IMatrix Columns(params int[] indices); /// - /// Gets the specified cols as a matrix. + /// Gets the specified cols as a matrix. 
/// Output is copied to the provided matrix /// /// diff --git a/src/SharpLearning.Containers/Matrices/StringMatrix.cs b/src/SharpLearning.Containers/Matrices/StringMatrix.cs index 5104be37..3d208441 100644 --- a/src/SharpLearning.Containers/Matrices/StringMatrix.cs +++ b/src/SharpLearning.Containers/Matrices/StringMatrix.cs @@ -21,14 +21,14 @@ public StringMatrix(int rows, int cols) } /// - /// Creates a matrix from the provided values with the specified rows and cols + /// Creates a matrix from the provided values with the specified rows and cols /// /// /// /// public StringMatrix(string[] values, int rows, int cols) { - if (values == null) { throw new ArgumentNullException("values"); } + if (values == null) { throw new ArgumentNullException(nameof(values)); } if (values.Length != rows * cols) { throw new ArgumentException("feature array length does not match row * cols"); } if (rows < 1) { throw new ArgumentException("matrix must have at least 1 row"); } if (cols < 1) { throw new ArgumentException("matrix must have at least 1 col"); } @@ -67,8 +67,8 @@ public void At(int row, int col, string item) /// /// Access the matrix like a 2D array /// - /// /// + /// /// public string this[int row, int col] { @@ -95,7 +95,7 @@ public string[] Row(int index) } /// - /// gets the specified row. + /// gets the specified row. /// The values are copied to the provided row array. /// /// @@ -165,7 +165,7 @@ public IMatrix Rows(params int[] indices) } /// - /// Gets the specified rows as a matrix. + /// Gets the specified rows as a matrix. 
/// Output is copied to the provided matrix /// /// @@ -244,47 +244,26 @@ public string[] Data() /// Gets the number of columns /// /// - public int ColumnCount { get; private set; } + public int ColumnCount { get; } /// /// Gets the number of rows /// /// - public int RowCount { get; private set; } + public int RowCount { get; } - /// - /// - /// - /// - /// public bool Equals(StringMatrix other) { if (RowCount != other.RowCount) { return false; } if (ColumnCount != other.ColumnCount) { return false; } - if (!Data().SequenceEqual(other.Data())) { return false; } - - return true; + return Data().SequenceEqual(other.Data()); } - /// - /// - /// - /// - /// public override bool Equals(object obj) { - if (obj is StringMatrix other && Equals(other)) - { - return true; - } - - return false; + return obj is StringMatrix other && Equals(other); } - /// - /// - /// - /// public override int GetHashCode() { unchecked // Overflow is fine, just wrap diff --git a/src/SharpLearning.Containers/Matrices/StringMatrixExtensions.cs b/src/SharpLearning.Containers/Matrices/StringMatrixExtensions.cs index 1567ecfd..42114ef0 100644 --- a/src/SharpLearning.Containers/Matrices/StringMatrixExtensions.cs +++ b/src/SharpLearning.Containers/Matrices/StringMatrixExtensions.cs @@ -8,9 +8,6 @@ namespace SharpLearning.Containers.Matrices; /// public static class StringMatrixExtensions { - /// - /// - /// public static readonly Converter DefaultConverter = FloatingPointConversion.ToF64; /// @@ -47,7 +44,6 @@ public static void Map(this StringMatrix matrix, Func func) matrix.Data().Map(func); } - /// /// Iterates over all elements in the matrix and applies the function to the elements. /// The values are updated directly in the Matrix. @@ -60,7 +56,7 @@ public static void Map(this StringMatrix matrix, Func func) } /// - /// Combines vector1 and vector2 column-wise. Vector2 is added to the end of vector1 + /// Combines vector1 and vector2 column-wise. 
Vector2 is added to the end of vector1 /// /// /// @@ -90,7 +86,7 @@ public static StringMatrix CombineCols(this string[] v1, string[] v2) } /// - /// Combines matrix and vector column-wise. Vector is added to the end of the matrix + /// Combines matrix and vector column-wise. Vector is added to the end of the matrix /// /// /// @@ -118,15 +114,14 @@ public static StringMatrix CombineCols(this StringMatrix m, string[] v) combineIndex += m.ColumnCount; Array.Copy(v, otherIndex, features, combineIndex, 1); - combineIndex += 1; + combineIndex++; } - return new StringMatrix(features, rows, cols); } /// - /// Combines vector and and matrix column-wise. Vector is added to the front of the matrix + /// Combines vector and matrix column-wise. Vector is added to the front of the matrix /// /// /// @@ -148,7 +143,7 @@ public static StringMatrix CombineCols(this string[] v, StringMatrix m) for (var i = 0; i < rows; i++) { Array.Copy(v, i, features, combineIndex, 1); - combineIndex += 1; + combineIndex++; var matrixIndex = i * m.ColumnCount; Array.Copy(matrixArray, matrixIndex, features, combineIndex, m.ColumnCount); @@ -159,7 +154,7 @@ public static StringMatrix CombineCols(this string[] v, StringMatrix m) } /// - /// Combines matrix1 and matrix2 column-wise. Matrix2 is added to the end of matrix1 + /// Combines matrix1 and matrix2 column-wise. Matrix2 is added to the end of matrix1 /// /// /// @@ -196,7 +191,7 @@ public static StringMatrix CombineCols(this StringMatrix m1, StringMatrix m2) } /// - /// Combines matrix1 and matrix2 row-wise. Matrix2 is added to the end of matrix1 + /// Combines matrix1 and matrix2 row-wise. 
Matrix2 is added to the end of matrix1 /// /// /// diff --git a/src/SharpLearning.Containers/ObservationTargetSet.cs b/src/SharpLearning.Containers/ObservationTargetSet.cs index f15707de..d26ada23 100644 --- a/src/SharpLearning.Containers/ObservationTargetSet.cs +++ b/src/SharpLearning.Containers/ObservationTargetSet.cs @@ -9,14 +9,8 @@ namespace SharpLearning.Containers; /// public sealed class ObservationTargetSet : IEquatable { - /// - /// - /// public readonly F64Matrix Observations; - /// - /// - /// public readonly double[] Targets; /// @@ -30,38 +24,17 @@ public ObservationTargetSet(F64Matrix observations, double[] targets) Targets = targets ?? throw new ArgumentNullException(nameof(targets)); } - /// - /// - /// - /// - /// public bool Equals(ObservationTargetSet other) { if (!Observations.Equals(other.Observations)) { return false; } - if (!Targets.SequenceEqual(other.Targets)) { return false; } - - return true; + return Targets.SequenceEqual(other.Targets); } - /// - /// - /// - /// - /// public override bool Equals(object obj) { - if (obj is ObservationTargetSet other && Equals(other)) - { - return true; - } - - return false; + return obj is ObservationTargetSet other && Equals(other); } - /// - /// - /// - /// public override int GetHashCode() { unchecked // Overflow is fine, just wrap diff --git a/src/SharpLearning.Containers/ProbabilityPrediction.cs b/src/SharpLearning.Containers/ProbabilityPrediction.cs index 334fd5f2..5557723d 100644 --- a/src/SharpLearning.Containers/ProbabilityPrediction.cs +++ b/src/SharpLearning.Containers/ProbabilityPrediction.cs @@ -10,19 +10,10 @@ namespace SharpLearning.Containers; [Serializable] public struct ProbabilityPrediction : IEquatable { - /// - /// - /// public readonly double Prediction; - /// - /// - /// public readonly Dictionary Probabilities; - /// - /// - /// /// /// Dictionary containing the class name to class probability public ProbabilityPrediction(double prediction, Dictionary probabilities) @@ -31,11 
+22,6 @@ public ProbabilityPrediction(double prediction, Dictionary proba Prediction = prediction; } - /// - /// - /// - /// - /// public bool Equals(ProbabilityPrediction other) { if (!Equal(Prediction, other.Prediction)) { return false; } @@ -45,67 +31,44 @@ public bool Equals(ProbabilityPrediction other) foreach (var item in zip) { if (item.This.Key != item.Other.Key) + { return false; + } + if (!Equal(item.This.Value, item.Other.Value)) + { return false; + } } return true; } - /// - /// - /// - /// - /// public override bool Equals(object obj) { - if (obj is ProbabilityPrediction) - return Equals((ProbabilityPrediction)obj); - return false; + return obj is ProbabilityPrediction prediction && Equals(prediction); } - /// - /// - /// - /// - /// - /// public static bool operator ==(ProbabilityPrediction p1, ProbabilityPrediction p2) { return p1.Equals(p2); } - /// - /// - /// - /// - /// - /// public static bool operator !=(ProbabilityPrediction p1, ProbabilityPrediction p2) { return !p1.Equals(p2); } - /// - /// - /// - /// public override int GetHashCode() { return Prediction.GetHashCode() ^ Probabilities.GetHashCode(); } - const double m_tolerence = 0.00001; + const double Tolerence = 0.00001; static bool Equal(double a, double b) { - var diff = Math.Abs(a * m_tolerence); - if (Math.Abs(a - b) <= diff) - { - return true; - } - - return false; + var diff = Math.Abs(a * Tolerence); + return Math.Abs(a - b) <= diff; } } diff --git a/src/SharpLearning.Containers/Views/F64MatrixColumnView.cs b/src/SharpLearning.Containers/Views/F64MatrixColumnView.cs index 95ee0e32..2552e9d5 100644 --- a/src/SharpLearning.Containers/Views/F64MatrixColumnView.cs +++ b/src/SharpLearning.Containers/Views/F64MatrixColumnView.cs @@ -39,12 +39,12 @@ public double this[int row] /// /// Gets the number of rows /// - public int Rows { get; private set; } + public int Rows { get; } /// /// Gets the interval of the column view /// - public Interval1D Interval { get { return new Interval1D(0, 
Rows); } } + public Interval1D Interval => new(0, Rows); double* RowPtr(int row) { diff --git a/src/SharpLearning.Containers/Views/F64MatrixPinnedPtr.cs b/src/SharpLearning.Containers/Views/F64MatrixPinnedPtr.cs index 7aa7472d..f106bae5 100644 --- a/src/SharpLearning.Containers/Views/F64MatrixPinnedPtr.cs +++ b/src/SharpLearning.Containers/Views/F64MatrixPinnedPtr.cs @@ -6,12 +6,12 @@ namespace SharpLearning.Containers.Views; /// /// Pinned pointer to F64Matrix. Proper disposal required. Preferably use this in a Using statement -/// +/// /// Using(var pinned = matrix.GetPinnedPointer()) /// { /// var view = pinned.View(); /// } -/// +/// /// public unsafe struct F64MatrixPinnedPtr : IDisposable { @@ -26,7 +26,7 @@ public unsafe struct F64MatrixPinnedPtr : IDisposable /// public F64MatrixPinnedPtr(F64Matrix matrix) { - if (matrix == null) { throw new ArgumentNullException("matrix"); } + if (matrix == null) { throw new ArgumentNullException(nameof(matrix)); } var data = matrix.Data(); m_handle = GCHandle.Alloc(data, GCHandleType.Pinned); @@ -44,9 +44,6 @@ public F64MatrixView View() return new F64MatrixView(m_ptr, m_rows, m_cols); } - /// - /// - /// public void Dispose() { if (m_ptr != null) diff --git a/src/SharpLearning.Containers/Views/F64MatrixView.cs b/src/SharpLearning.Containers/Views/F64MatrixView.cs index 2f1590d9..dc3d6723 100644 --- a/src/SharpLearning.Containers/Views/F64MatrixView.cs +++ b/src/SharpLearning.Containers/Views/F64MatrixView.cs @@ -40,13 +40,13 @@ public F64MatrixView(double* dataPtr, int rows, int cols, int strideInBytes) /// Gets the number of columns /// /// - public int ColumnCount { get; private set; } + public int ColumnCount { get; } /// /// Gets the number of rows /// /// - public int RowCount { get; private set; } + public int RowCount { get; } /// /// Gets the item at the specified position diff --git a/src/SharpLearning.Containers/Views/F64VectorPinnedPtr.cs b/src/SharpLearning.Containers/Views/F64VectorPinnedPtr.cs index 
389aa350..d8b303ec 100644 --- a/src/SharpLearning.Containers/Views/F64VectorPinnedPtr.cs +++ b/src/SharpLearning.Containers/Views/F64VectorPinnedPtr.cs @@ -5,12 +5,12 @@ namespace SharpLearning.Containers.Views; /// /// Pinned pointer to F64Vector. Proper disposal required. Preferably use this in a Using statement -/// +/// /// Using(var pinned = vector.GetPinnedPointer()) /// { /// var view = pinned.View(); /// } -/// +/// /// public unsafe struct F64VectorPinnedPtr : IDisposable { @@ -18,10 +18,6 @@ public unsafe struct F64VectorPinnedPtr : IDisposable readonly GCHandle m_handle; double* m_ptr; - /// - /// - /// - /// public F64VectorPinnedPtr(double[] v) { m_length = v.Length; @@ -38,9 +34,6 @@ public F64VectorView View() return new F64VectorView(m_ptr, m_length); } - /// - /// - /// public void Dispose() { if (m_ptr != null) diff --git a/src/SharpLearning.Containers/Views/F64VectorView.cs b/src/SharpLearning.Containers/Views/F64VectorView.cs index 0d883712..b902b192 100644 --- a/src/SharpLearning.Containers/Views/F64VectorView.cs +++ b/src/SharpLearning.Containers/Views/F64VectorView.cs @@ -33,16 +33,12 @@ public double this[int index] set { m_ptr[index] = value; } } - /// - /// - /// - public int Length { get; private set; } - + public int Length { get; } /// /// Gets the interval of the F64View /// - public Interval1D Interval { get { return Interval1D.Create(0, Length); } } + public Interval1D Interval => Interval1D.Create(0, Length); /// /// Gets a sub-view over the specified interval diff --git a/src/SharpLearning.Containers/Views/Interval1D.cs b/src/SharpLearning.Containers/Views/Interval1D.cs index abf8b3f2..c7deeeb6 100644 --- a/src/SharpLearning.Containers/Views/Interval1D.cs +++ b/src/SharpLearning.Containers/Views/Interval1D.cs @@ -3,25 +3,13 @@ namespace SharpLearning.Containers.Views; -/// -/// -/// [StructLayout(LayoutKind.Sequential)] public struct Interval1D : IEquatable { - /// - /// - /// public readonly int FromInclusive; - /// - /// - /// 
public readonly int ToExclusive; - /// - /// - /// public readonly int Length; /// @@ -51,34 +39,17 @@ public static Interval1D Create(int fromInclusive, int toExclusive) return new Interval1D(fromInclusive, toExclusive); } - /// - /// - /// - /// - /// - /// public static bool operator !=(Interval1D x, Interval1D y) { return !(x == y); } - /// - /// - /// - /// - /// - /// public static bool operator ==(Interval1D x, Interval1D y) { return (x.FromInclusive == y.FromInclusive) && (x.ToExclusive == y.ToExclusive); } - /// - /// - /// - /// - /// public bool Equals(Interval1D other) { return (FromInclusive == other.FromInclusive) && @@ -86,23 +57,11 @@ public bool Equals(Interval1D other) (Length == other.Length); } - /// - /// - /// - /// - /// public override bool Equals(object other) { - if (other is Interval1D) - return Equals((Interval1D)other); - else - return false; + return other is Interval1D interval1D && Equals(interval1D); } - /// - /// - /// - /// public override int GetHashCode() { return FromInclusive.GetHashCode() ^ ToExclusive.GetHashCode() ^ Length.GetHashCode(); diff --git a/src/SharpLearning.Containers/Views/Interval2D.cs b/src/SharpLearning.Containers/Views/Interval2D.cs index f4894095..9fd150ff 100644 --- a/src/SharpLearning.Containers/Views/Interval2D.cs +++ b/src/SharpLearning.Containers/Views/Interval2D.cs @@ -3,20 +3,11 @@ namespace SharpLearning.Containers.Views; -/// -/// -/// [StructLayout(LayoutKind.Sequential)] public struct Interval2D : IEquatable { - /// - /// - /// public readonly Interval1D Rows; - /// - /// - /// public readonly Interval1D Cols; /// @@ -40,57 +31,28 @@ public static Interval2D Create(Interval1D rowInterval, Interval1D colInterval) return new Interval2D(rowInterval, colInterval); } - /// - /// - /// - /// - /// public bool Equals(Interval2D other) { return (Cols.Equals(other.Cols)) && (Rows.Equals(other.Rows)); } - /// - /// - /// - /// - /// public override bool Equals(object other) { - if (other is Interval2D) - 
return Equals((Interval2D)other); - else - return false; + return other is Interval2D interval2D && Equals(interval2D); } - /// - /// - /// - /// - /// - /// public static bool operator !=(Interval2D x, Interval2D y) { return !(x == y); } - /// - /// - /// - /// - /// - /// public static bool operator ==(Interval2D x, Interval2D y) { return (x.Cols == y.Cols) && (x.Rows == y.Rows); } - /// - /// - /// - /// public override int GetHashCode() { return Cols.GetHashCode() ^ Rows.GetHashCode(); diff --git a/src/SharpLearning.CrossValidation.Test/CrossValidationUtilitiesTest.cs b/src/SharpLearning.CrossValidation.Test/CrossValidationUtilitiesTest.cs index 68338026..6fb6c37b 100644 --- a/src/SharpLearning.CrossValidation.Test/CrossValidationUtilitiesTest.cs +++ b/src/SharpLearning.CrossValidation.Test/CrossValidationUtilitiesTest.cs @@ -20,10 +20,10 @@ public void CrossValidationUtilities_GetKFoldCrossValidationIndexSets() var expecteds = new List<(int[] trainingIndices, int[] validationIndices)> { - (new int[] { 0, 1, 3, 4, 5, 7, 9, 10, 11 }, new int[] { 6, 8, 2 }), - (new int[] { 0, 2, 3, 4, 6, 7, 8, 9, 10 }, new int[] { 1, 11, 5 }), - (new int[] { 0, 1, 2, 4, 5, 6, 8, 9, 11 }, new int[] { 7, 3, 10 }), - (new int[] { 1, 2, 3, 5, 6, 7, 8, 10, 11 }, new int[] { 0, 4, 9 }), + ([0, 1, 3, 4, 5, 7, 9, 10, 11], [6, 8, 2]), + ([0, 2, 3, 4, 6, 7, 8, 9, 10 ], [1, 11, 5]), + ([0, 1, 2, 4, 5, 6, 8, 9, 11 ], [7, 3, 10]), + ([1, 2, 3, 5, 6, 7, 8, 10, 11], [0, 4, 9]), }; Assert.AreEqual(expecteds.Count, actuals.Count); @@ -50,8 +50,8 @@ public void CrossValidationUtilities_GetKFoldCrossValidationIndexSets_Indices() var expecteds = new List<(int[] trainingIndices, int[] validationIndices)> { // Sets contains values from the indices array only. 
- (new int[] { 1, 3, 4, 5 }, new int[] { 2, 6, 0 }), - (new int[] { 0, 2, 6 }, new int[] { 1, 3, 4, 5 }), + ([1, 3, 4, 5 ], [2, 6, 0]), + ([0, 2, 6 ], [1, 3, 4, 5]), }; Assert.AreEqual(expecteds.Count, actuals.Count); @@ -74,12 +74,12 @@ public void CrossValidationUtilities_GetKFoldCrossValidationIndexSets_Handle_Rem var expecteds = new List<(int[] trainingIndices, int[] validationIndices)> { - (new int[] { 0, 1, 2, 3, 5, 6, 7, 9, 11, 12, 13, 14 }, new int[] { 10, 4, 8 }), - (new int[] { 0, 1, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13 }, new int[] { 2, 7, 14 }), - (new int[] { 1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 14 }, new int[] { 5, 13, 0 }), + ([ 0, 1, 2, 3, 5, 6, 7, 9, 11, 12, 13, 14 ], [10, 4, 8]), + ([ 0, 1, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13 ], [2, 7, 14]), + ([ 1, 2, 3, 4, 6, 7, 8, 9, 10, 11, 12, 14 ], [5, 13, 0]), // Handle remainder from target.length / foldsCount, // by adding remaining indices to the last set - (new int[] { 0, 2, 4, 5, 7, 8, 10, 13, 14 }, new int[] { 1, 3, 6, 9, 11, 12 }), + ([ 0, 2, 4, 5, 7, 8, 10, 13, 14 ], [1, 3, 6, 9, 11, 12]), }; Assert.AreEqual(expecteds.Count, actuals.Count); @@ -94,11 +94,11 @@ public void CrossValidationUtilities_GetKFoldCrossValidationIndexSets_Handle_Rem static void TraceIndexSets(IReadOnlyList<(int[] trainingIndices, int[] validationIndices)> indexSets) { - const string Separator = ", "; + const string separator = ", "; foreach (var set in indexSets) { - Trace.WriteLine("(new int[] { " + string.Join(Separator, set.trainingIndices) + " }, " + - "new int[] { " + string.Join(Separator, set.validationIndices) + " }),"); + Trace.WriteLine("(new int[] { " + string.Join(separator, set.trainingIndices) + " }, " + + "new int[] { " + string.Join(separator, set.validationIndices) + " }),"); } } } diff --git a/src/SharpLearning.CrossValidation.Test/DataSetUtilities.cs b/src/SharpLearning.CrossValidation.Test/DataSetUtilities.cs index e83d767f..f072374e 100644 --- a/src/SharpLearning.CrossValidation.Test/DataSetUtilities.cs +++ 
b/src/SharpLearning.CrossValidation.Test/DataSetUtilities.cs @@ -253,5 +253,4 @@ public static (F64Matrix observations, double[] targets) LoadDecisionTreeDataSet 1;0.012003;-0.217283 1;0.018883;-0.300577 1;0.071476;0.006014"; - } diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointExtensionsTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointExtensionsTest.cs index ede33ded..05b6ea11 100644 --- a/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointExtensionsTest.cs +++ b/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvePointExtensionsTest.cs @@ -14,7 +14,7 @@ public class LearningCurvePointExtensionsTest public void BiasVarianceLearningCurvePointExtensions_ToF64Matrix() { var sut = new List { new(10, 0.0, 1.0), - new(100, 3.0, 8.0), new(1000, 4.0, 4.0) }; + new(100, 3.0, 8.0), new(1000, 4.0, 4.0), }; var actual = sut.ToF64Matrix(); var expected = new F64Matrix([10, @@ -34,7 +34,7 @@ public void BiasVarianceLearningCurvePointExtensions_ToF64Matrix() public void BiasVarianceLearningCurvePointExtensions_Write() { var sut = new List { new(10, 0.0, 1.0), - new(100, 3.0, 8.0), new(1000, 4.0, 4.0) }; + new(100, 3.0, 8.0), new(1000, 4.0, 4.0), }; var writer = new StringWriter(); sut.Write(() => writer); diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvesCalculatorTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvesCalculatorTest.cs index 2fe48cdd..b72e1509 100644 --- a/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvesCalculatorTest.cs +++ b/src/SharpLearning.CrossValidation.Test/LearningCurves/LearningCurvesCalculatorTest.cs @@ -30,7 +30,7 @@ public void LearningCurvesCalculator_Calculate() var expected = new List() { new(32, 0, 0.141565953928265), - new(128, 0.0, 0.068970597423950036) + new(128, 0.0, 0.068970597423950036), }; CollectionAssert.AreEqual(expected, actual); @@ -56,7 +56,7 @@ public void 
LearningCurvesCalculator_Calculate_Indices_Provided() var expected = new List() { new(32, 0, 0.141565953928265), - new(128, 0.0, 0.068970597423950036) + new(128, 0.0, 0.068970597423950036), }; CollectionAssert.AreEqual(expected, actual); diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/NoShuffleLearningCurvesCalculatorTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/NoShuffleLearningCurvesCalculatorTest.cs index f197891b..06c8b937 100644 --- a/src/SharpLearning.CrossValidation.Test/LearningCurves/NoShuffleLearningCurvesCalculatorTest.cs +++ b/src/SharpLearning.CrossValidation.Test/LearningCurves/NoShuffleLearningCurvesCalculatorTest.cs @@ -25,7 +25,7 @@ public void NoShuffleLearningCurvesCalculator_Calculate() var expected = new List() { new(32, 0, 0.12874833873980004), - new(128, 0.0, 0.067720786718774989) + new(128, 0.0, 0.067720786718774989), }; CollectionAssert.AreEqual(expected, actual); diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/RandomLearningCurvesCalculatorTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/RandomLearningCurvesCalculatorTest.cs index 7bbe4b55..35c30c54 100644 --- a/src/SharpLearning.CrossValidation.Test/LearningCurves/RandomLearningCurvesCalculatorTest.cs +++ b/src/SharpLearning.CrossValidation.Test/LearningCurves/RandomLearningCurvesCalculatorTest.cs @@ -25,7 +25,7 @@ public void RandomLearningCurvesCalculator_Calculate() var expected = new List() { new(32, 0, 0.141565953928265), - new(128, 0.0, 0.068970597423950036) + new(128, 0.0, 0.068970597423950036), }; CollectionAssert.AreEqual(expected, actual); diff --git a/src/SharpLearning.CrossValidation.Test/LearningCurves/StratifiedLearningCurvesCalculatorTest.cs b/src/SharpLearning.CrossValidation.Test/LearningCurves/StratifiedLearningCurvesCalculatorTest.cs index 4c3b49db..23ce7d21 100644 --- a/src/SharpLearning.CrossValidation.Test/LearningCurves/StratifiedLearningCurvesCalculatorTest.cs +++ 
b/src/SharpLearning.CrossValidation.Test/LearningCurves/StratifiedLearningCurvesCalculatorTest.cs @@ -25,7 +25,7 @@ public void StratifiedLearningCurvesCalculator_Calculate() var expected = new List() { new(4, 0, 0.39999999999999997), - new(16, 0.0625, 0.33333333333333331) + new(16, 0.0625, 0.33333333333333331), }; CollectionAssert.AreEqual(expected, actual); diff --git a/src/SharpLearning.CrossValidation.Test/Samplers/RandomIndexSamplerTest.cs b/src/SharpLearning.CrossValidation.Test/Samplers/RandomIndexSamplerTest.cs index 895c4cfb..cfe5b061 100644 --- a/src/SharpLearning.CrossValidation.Test/Samplers/RandomIndexSamplerTest.cs +++ b/src/SharpLearning.CrossValidation.Test/Samplers/RandomIndexSamplerTest.cs @@ -17,7 +17,7 @@ public void RandomIndexSampler_Sample() var sampleIndices = sut.Sample(values, sampleSize); var actual = values.GetIndices(sampleIndices); - var expected = new int[] { 1, 2, 1, 2, 1, 3, 1, 2, 2, 1, 2, }; + var expected = new int[] { 1, 2, 1, 2, 1, 3, 1, 2, 2, 1, 2 }; CollectionAssert.AreEqual(expected, actual); } diff --git a/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestSetSplitTest.cs b/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestSetSplitTest.cs index e3230bb2..255e2f02 100644 --- a/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestSetSplitTest.cs +++ b/src/SharpLearning.CrossValidation.Test/TrainingTestSplitters/TrainingTestSetSplitTest.cs @@ -24,7 +24,6 @@ public void TrainingTestSetSplit_Equals() var testTargets1 = new double[] { 1, 2 }; var testTargets2 = new double[] { 2, 1 }; - var sut = new TrainingTestSetSplit(trainingObservations1, trainingTargets1, testObservations1, testTargets1); var equal = new TrainingTestSetSplit(trainingObservations1, trainingTargets1, testObservations1, testTargets1); var notEqual1 = new TrainingTestSetSplit(trainingObservations2, trainingTargets1, testObservations1, testTargets1); diff --git 
a/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs b/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs index 504a96f4..d84a4144 100644 --- a/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs +++ b/src/SharpLearning.CrossValidation/Augmentators/ContinuousMungeAugmentator.cs @@ -8,7 +8,7 @@ namespace SharpLearning.CrossValidation.Augmentators; /// Augmentates continuous data according to the MUNGE method: /// https://www.cs.cornell.edu/~caruana/compression.kdd06.pdf /// The method seeks to keep the original distribution of data. This is done by traversing each observation in the dataset -/// finding its nearest neighbour (euclidean distance) and modifiyng each feature in the observation according to a probability. +/// finding its nearest neighbour (euclidean distance) and modifiyng each feature in the observation according to a probability. /// The features are modified using the value from the nearest neighbour as the mean when sampling a new value from a uniform distribution. /// public sealed class ContinuousMungeAugmentator @@ -21,10 +21,10 @@ public sealed class ContinuousMungeAugmentator /// Augmentates continuous data according to the MUNGE method: /// https://www.cs.cornell.edu/~caruana/compression.kdd06.pdf /// The method seeks to keep the original distribution of data. This is done by traversing each observation in the dataset - /// finding its nearest neighbour (euclidean distance) and modifiyng each feature in the observation according to a probability. + /// finding its nearest neighbour (euclidean distance) and modifiyng each feature in the observation according to a probability. /// The features are modified using the value from the nearest neighbour as the mean when sampling a new value from a uniform distribution. /// - /// The probability that a feature will be altered with its nearest neighbour. 
+ /// The probability that a feature will be altered with its nearest neighbour. /// Must be between 0.0 and 1.0. (Default is 0.2) /// Variance when sampling a new value for an augmentated sample. (Default is 1.0) /// Seed for random augmentation @@ -126,7 +126,9 @@ static double GetDistance(double[] p, double[] q) double diff = 0; if (p.Length != q.Length) + { throw new ArgumentException("Input vectors must be of the same dimension."); + } for (int x = 0, len = p.Length; x < len; x++) { diff --git a/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs b/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs index fb643ad1..62b3132d 100644 --- a/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs +++ b/src/SharpLearning.CrossValidation/Augmentators/NominalMungeAugmentator.cs @@ -8,7 +8,7 @@ namespace SharpLearning.CrossValidation.Augmentators; /// Augmentates nominal data according to the MUNGE method: /// https://www.cs.cornell.edu/~caruana/compression.kdd06.pdf /// The method seeks to keep the original distribution of data. This is done by traversing each observation in the dataset -/// finding its nearest neighbour (euclidean distance) and modifiyng each feature in the observation according to a probability. +/// finding its nearest neighbour (euclidean distance) and modifiyng each feature in the observation according to a probability. /// The features are modified using the value from the nearest neighbour as the mean when sampling a new value from a uniform distribution. /// public sealed class NominalMungeAugmentator @@ -20,10 +20,10 @@ public sealed class NominalMungeAugmentator /// Augmentates nominal data according to the MUNGE method: /// https://www.cs.cornell.edu/~caruana/compression.kdd06.pdf /// The method seeks to keep the original distribution of data. 
This is done by traversing each observation in the dataset - /// finding its nearest neighbour (euclidean distance) and modifiyng each feature in the observation according to a probability. + /// finding its nearest neighbour (euclidean distance) and modifiyng each feature in the observation according to a probability. /// The features are modified using the value from the nearest neighbour as the mean when sampling a new value from a uniform distribution. /// - /// The probability that a feature will be altered with its nearest neighbour. + /// The probability that a feature will be altered with its nearest neighbour. /// Must be between 0.0 and 1.0. (Default is 0.2) /// Seed for random augmentation public NominalMungeAugmentator(double probabilityParameter = 0.2, int seed = 432) diff --git a/src/SharpLearning.CrossValidation/CrossValidationUtilities.cs b/src/SharpLearning.CrossValidation/CrossValidationUtilities.cs index 11cf8b9f..e329a245 100644 --- a/src/SharpLearning.CrossValidation/CrossValidationUtilities.cs +++ b/src/SharpLearning.CrossValidation/CrossValidationUtilities.cs @@ -10,7 +10,7 @@ namespace SharpLearning.CrossValidation; public static class CrossValidationUtilities { /// - /// Returns a list of (trainingIndices, validationIndices) + /// Returns a list of (trainingIndices, validationIndices) /// for use with k-fold cross-validation. /// /// @@ -25,7 +25,7 @@ public static class CrossValidationUtilities } /// - /// Returns a list of (trainingIndices, validationIndices) + /// Returns a list of (trainingIndices, validationIndices) /// for use with k-fold cross-validation. 
/// /// diff --git a/src/SharpLearning.CrossValidation/CrossValidators/CrossValidation.cs b/src/SharpLearning.CrossValidation/CrossValidators/CrossValidation.cs index 4e0ce7b0..e3652e85 100644 --- a/src/SharpLearning.CrossValidation/CrossValidators/CrossValidation.cs +++ b/src/SharpLearning.CrossValidation/CrossValidators/CrossValidation.cs @@ -18,7 +18,7 @@ public class CrossValidation : ICrossValidation /// /// Cross validation for evaluating how learning algorithms perform on unseen observations /// - /// Sampling strategy for the provided indices + /// Sampling strategy for the provided indices /// before they are divided into the provided folds /// Number of folds that should be used for cross validation public CrossValidation(IIndexSampler sampler, int crossValidationFolds) @@ -48,7 +48,7 @@ public TPrediction[] CrossValidate(IIndexedLearner learner, } /// - /// Cross validated predictions. + /// Cross validated predictions. /// Only crossValidates within the provided indices. /// The predictions are returned in the predictions array. /// diff --git a/src/SharpLearning.CrossValidation/CrossValidators/ICrossValidation.cs b/src/SharpLearning.CrossValidation/CrossValidators/ICrossValidation.cs index eb675e74..ec0520b5 100644 --- a/src/SharpLearning.CrossValidation/CrossValidators/ICrossValidation.cs +++ b/src/SharpLearning.CrossValidation/CrossValidators/ICrossValidation.cs @@ -20,7 +20,7 @@ TPrediction[] CrossValidate(IIndexedLearner learner, F64Matrix observations, double[] targets); /// - /// Cross validated predictions. + /// Cross validated predictions. /// Only crossValidates within the provided indices. /// The predictions are returned in the predictions array. 
/// diff --git a/src/SharpLearning.CrossValidation/LearningCurves/ILearningCurvesCalculator.cs b/src/SharpLearning.CrossValidation/LearningCurves/ILearningCurvesCalculator.cs index 9fe79e0b..5f72a698 100644 --- a/src/SharpLearning.CrossValidation/LearningCurves/ILearningCurvesCalculator.cs +++ b/src/SharpLearning.CrossValidation/LearningCurves/ILearningCurvesCalculator.cs @@ -7,7 +7,7 @@ namespace SharpLearning.CrossValidation.LearningCurves; /// /// Bias variance analysis calculator for constructing learning curves. /// Learning curves can be used to determine if a model has high bias or high variance. -/// +/// /// Solutions for model with high bias: /// - Add more features. /// - Use a more sophisticated model @@ -21,7 +21,7 @@ public interface ILearningCurvesCalculator { /// /// Returns a list of BiasVarianceLearningCurvePoints for constructing learning curves. - /// The points contain sample size, training score and validation score. + /// The points contain sample size, training score and validation score. 
/// /// /// diff --git a/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvePoint.cs b/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvePoint.cs index fb0b10f8..f93feebe 100644 --- a/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvePoint.cs +++ b/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvePoint.cs @@ -8,17 +8,17 @@ namespace SharpLearning.CrossValidation.LearningCurves; public struct LearningCurvePoint : IEquatable { /// - /// + /// /// public readonly int SampleSize; /// - /// + /// /// public readonly double TrainingScore; /// - /// + /// /// public readonly double ValidationScore; @@ -36,7 +36,7 @@ public LearningCurvePoint(int sampleSize, double trainingScore, double validatio } /// - /// + /// /// /// /// @@ -44,13 +44,11 @@ public bool Equals(LearningCurvePoint other) { if (SampleSize != other.SampleSize) { return false; } if (TrainingScore != other.TrainingScore) { return false; } - if (ValidationScore != other.ValidationScore) { return false; } - - return true; + return ValidationScore == other.ValidationScore; } /// - /// + /// /// /// /// @@ -61,7 +59,7 @@ public bool Equals(LearningCurvePoint other) } /// - /// + /// /// /// /// @@ -74,20 +72,17 @@ public bool Equals(LearningCurvePoint other) } /// - /// + /// /// /// /// public override bool Equals(object obj) { - if (obj is LearningCurvePoint) - return Equals((LearningCurvePoint)obj); - else - return false; + return obj is LearningCurvePoint learningCurvePoint && Equals(learningCurvePoint); } /// - /// + /// /// /// public override int GetHashCode() diff --git a/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvePointExtensions.cs b/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvePointExtensions.cs index 900b70e8..a596d9c5 100644 --- a/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvePointExtensions.cs +++ b/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvePointExtensions.cs @@ -20,7 +20,7 @@ 
public static class LearningCurvePointExtensions public static F64Matrix ToF64Matrix(this List points) { if (points.Count == 0) - { throw new ArgumentException("There must be at least one element in the list to convert to a matrix"); }; + { throw new ArgumentException("There must be at least one element in the list to convert to a matrix"); } var matrix = new F64Matrix(points.Count, 3); for (var i = 0; i < points.Count; i++) @@ -47,7 +47,7 @@ public static void Write(this List points, Func { { "SampleCount", 0 }, { "TrainingError", 1 }, - { "ValidationError", 2 } + { "ValidationError", 2 }, }; points.ToF64Matrix() diff --git a/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvesCalculator.cs b/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvesCalculator.cs index 6c02b47c..454f554c 100644 --- a/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvesCalculator.cs +++ b/src/SharpLearning.CrossValidation/LearningCurves/LearningCurvesCalculator.cs @@ -11,7 +11,7 @@ namespace SharpLearning.CrossValidation.LearningCurves; /// /// Bias variance analysis calculator for constructing learning curves. /// Learning curves can be used to determine if a model has high bias or high variance. -/// +/// /// Solutions for model with high bias: /// - Add more features. /// - Use a more sophisticated model @@ -36,7 +36,7 @@ public class LearningCurvesCalculator : ILearningCurvesCalculator /// Type of shuffler to use when splitting data /// The error metric used - /// A list of sample percentages determining the + /// A list of sample percentages determining the /// training data used in each point of the learning curve /// How many times should the data be shuffled pr. 
calculated point public LearningCurvesCalculator(ITrainingTestIndexSplitter trainingValidationIndexSplitter, @@ -55,7 +55,7 @@ public LearningCurvesCalculator(ITrainingTestIndexSplitter trainingValid /// /// Returns a list of BiasVarianceLearningCurvePoints for constructing learning curves. - /// The points contain sample size, training score and validation score. + /// The points contain sample size, training score and validation score. /// /// /// @@ -73,7 +73,7 @@ public List Calculate(IIndexedLearner learnerFa /// /// Returns a list of BiasVarianceLearningCurvePoints for constructing learning curves. - /// The points contain sample size, training score and validation score. + /// The points contain sample size, training score and validation score. /// /// /// diff --git a/src/SharpLearning.CrossValidation/LearningCurves/NoShuffleLearningCurvesCalculator.cs b/src/SharpLearning.CrossValidation/LearningCurves/NoShuffleLearningCurvesCalculator.cs index 19f6a08f..65ea7adf 100644 --- a/src/SharpLearning.CrossValidation/LearningCurves/NoShuffleLearningCurvesCalculator.cs +++ b/src/SharpLearning.CrossValidation/LearningCurves/NoShuffleLearningCurvesCalculator.cs @@ -7,9 +7,9 @@ namespace SharpLearning.CrossValidation.LearningCurves; /// /// Bias variance analysis calculator for constructing learning curves. /// Learning curves can be used to determine if a model has high bias or high variance. -/// +/// /// The order of the data is kept when splitting the data. -/// +/// /// Solutions for model with high bias: /// - Add more features. /// - Use a more sophisticated model @@ -25,11 +25,11 @@ public sealed class NoShuffleLearningCurvesCalculator : /// /// Bias variance analysis calculator for constructing learning curves. /// Learning curves can be used to determine if a model has high bias or high variance. - /// + /// /// The order of the data is kept when splitting the data. 
/// /// The error metric used - /// A list of sample percentages determining the + /// A list of sample percentages determining the /// training data used in each point of the learning curve /// Total percentage of data used for training public NoShuffleLearningCurvesCalculator(IMetric metric, double[] samplePercentages, double trainingPercentage) diff --git a/src/SharpLearning.CrossValidation/LearningCurves/RandomShuffleLearningCurvesCalculator.cs b/src/SharpLearning.CrossValidation/LearningCurves/RandomShuffleLearningCurvesCalculator.cs index 93c10754..1cffe4dc 100644 --- a/src/SharpLearning.CrossValidation/LearningCurves/RandomShuffleLearningCurvesCalculator.cs +++ b/src/SharpLearning.CrossValidation/LearningCurves/RandomShuffleLearningCurvesCalculator.cs @@ -7,9 +7,9 @@ namespace SharpLearning.CrossValidation.LearningCurves; /// /// Bias variance analysis calculator for constructing learning curves. /// Learning curves can be used to determine if a model has high bias or high variance. -/// +/// /// The order of the data is randomized. -/// +/// /// Solutions for model with high bias: /// - Add more features. /// - Use a more sophisticated model @@ -25,14 +25,14 @@ public sealed class RandomShuffleLearningCurvesCalculator : /// /// Bias variance analysis calculator for constructing learning curves. /// Learning curves can be used to determine if a model has high bias or high variance. - /// + /// /// The order of the data is randomized. 
/// /// The error metric used - /// A list of sample percentages determining the + /// A list of sample percentages determining the /// training data used in each point of the learning curve - /// Number of shuffles done at each sampling point /// Total percentage of data used for training + /// Number of shuffles done at each sampling point public RandomShuffleLearningCurvesCalculator(IMetric metric, double[] samplePercentages, double trainingPercentage, int numberOfShufflesPrSample = 5) : base(new RandomTrainingTestIndexSplitter(trainingPercentage), @@ -46,7 +46,7 @@ public RandomShuffleLearningCurvesCalculator(IMetric metric /// The order of the data is randomized. /// /// The error metric used - /// A list of sample percentages determining the + /// A list of sample percentages determining the /// training data used in each point of the learning curve /// /// diff --git a/src/SharpLearning.CrossValidation/LearningCurves/StratifiedLearningCurvesCalculator.cs b/src/SharpLearning.CrossValidation/LearningCurves/StratifiedLearningCurvesCalculator.cs index 6b82a6c7..dc819c88 100644 --- a/src/SharpLearning.CrossValidation/LearningCurves/StratifiedLearningCurvesCalculator.cs +++ b/src/SharpLearning.CrossValidation/LearningCurves/StratifiedLearningCurvesCalculator.cs @@ -7,9 +7,9 @@ namespace SharpLearning.CrossValidation.LearningCurves; /// /// Bias variance analysis calculator for constructing learning curves. /// Learning curves can be used to determine if a model has high bias or high variance. -/// +/// /// The order of the data is stratified to have similar distributions in training and validation set. -/// +/// /// Solutions for model with high bias: /// - Add more features. /// - Use a more sophisticated model @@ -25,11 +25,11 @@ public sealed class StratifiedLearningCurvesCalculator : /// /// Bias variance analysis calculator for constructing learning curves. /// Learning curves can be used to determine if a model has high bias or high variance. 
- /// + /// /// The order of the data is stratified to have similar distributions in training and validation set. /// /// The error metric used - /// A list of sample percentages determining the + /// A list of sample percentages determining the /// training data used in each point of the learning curve /// Total percentage of data used for training /// Number of shuffles done at each sampling point diff --git a/src/SharpLearning.CrossValidation/Samplers/IIndexSampler.cs b/src/SharpLearning.CrossValidation/Samplers/IIndexSampler.cs index 6f06437a..c3e69029 100644 --- a/src/SharpLearning.CrossValidation/Samplers/IIndexSampler.cs +++ b/src/SharpLearning.CrossValidation/Samplers/IIndexSampler.cs @@ -8,7 +8,7 @@ namespace SharpLearning.CrossValidation.Samplers; public interface IIndexSampler { /// - /// + /// /// /// /// @@ -16,7 +16,7 @@ public interface IIndexSampler int[] Sample(T[] data, int sampleSize); /// - /// + /// /// /// /// diff --git a/src/SharpLearning.CrossValidation/Samplers/NoShuffleIndexSampler.cs b/src/SharpLearning.CrossValidation/Samplers/NoShuffleIndexSampler.cs index 997e3286..8948091f 100644 --- a/src/SharpLearning.CrossValidation/Samplers/NoShuffleIndexSampler.cs +++ b/src/SharpLearning.CrossValidation/Samplers/NoShuffleIndexSampler.cs @@ -4,7 +4,7 @@ namespace SharpLearning.CrossValidation.Samplers; /// -/// No shuffle index sampler. +/// No shuffle index sampler. /// Simply takes the amount of samples specified by sample size from the start of the data /// /// @@ -18,13 +18,10 @@ public sealed class NoShuffleIndexSampler : IIndexSampler /// public int[] Sample(T[] data, int sampleSize) { - if (data.Length < sampleSize) - { - throw new ArgumentException("Sample size " + sampleSize + - " is larger than data size " + data.Length); - } - - return Enumerable.Range(0, sampleSize).ToArray(); + return data.Length < sampleSize + ? 
throw new ArgumentException("Sample size " + sampleSize + + " is larger than data size " + data.Length) + : Enumerable.Range(0, sampleSize).ToArray(); } /// @@ -42,12 +39,9 @@ public int[] Sample(T[] data, int sampleSize, int[] dataIndices) " is larger than data size " + data.Length); } - if (data.Length < dataIndices.Length) - { - throw new ArgumentException("dataIndice size " + dataIndices.Length + - " is larger than data size " + data.Length); - } - - return dataIndices.Take(sampleSize).ToArray(); + return data.Length < dataIndices.Length + ? throw new ArgumentException("dataIndice size " + dataIndices.Length + + " is larger than data size " + data.Length) + : dataIndices.Take(sampleSize).ToArray(); } } diff --git a/src/SharpLearning.CrossValidation/Samplers/RandomIndexSampler.cs b/src/SharpLearning.CrossValidation/Samplers/RandomIndexSampler.cs index ed421f4d..2186e75d 100644 --- a/src/SharpLearning.CrossValidation/Samplers/RandomIndexSampler.cs +++ b/src/SharpLearning.CrossValidation/Samplers/RandomIndexSampler.cs @@ -13,7 +13,7 @@ public sealed class RandomIndexSampler : IIndexSampler readonly Random m_random; /// - /// + /// /// /// public RandomIndexSampler(int seed) @@ -22,7 +22,7 @@ public RandomIndexSampler(int seed) } /// - /// + /// /// public RandomIndexSampler() : this(42) @@ -50,7 +50,7 @@ public int[] Sample(T[] data, int sampleSize) } /// - /// Random index sampler. Takes at random a sample of size sample size. + /// Random index sampler. Takes at random a sample of size sample size. 
/// Only samples within the indices provided in dataIndices /// /// diff --git a/src/SharpLearning.CrossValidation/Samplers/StratifiedIndexSampler.cs b/src/SharpLearning.CrossValidation/Samplers/StratifiedIndexSampler.cs index 4564deb5..86872f30 100644 --- a/src/SharpLearning.CrossValidation/Samplers/StratifiedIndexSampler.cs +++ b/src/SharpLearning.CrossValidation/Samplers/StratifiedIndexSampler.cs @@ -4,10 +4,10 @@ namespace SharpLearning.CrossValidation.Samplers; /// -/// Stratified index sampler. Samples. +/// Stratified index sampler. Samples. /// Takes a stratified sample of size sampleSize with distributions equal to the input data. /// http://en.wikipedia.org/wiki/Stratified_sampling -/// Returns a set of indices corresponding to the samples chosen. +/// Returns a set of indices corresponding to the samples chosen. /// /// Returns a set of indices corresponding to the samples chosen. public sealed class StratifiedIndexSampler : IIndexSampler @@ -15,10 +15,10 @@ public sealed class StratifiedIndexSampler : IIndexSampler readonly Random m_random; /// - /// Stratified index sampler. Samples. + /// Stratified index sampler. Samples. /// Takes a stratified sample of size sampleSize with distributions equal to the input data. /// http://en.wikipedia.org/wiki/Stratified_sampling - /// Returns a set of indices corresponding to the samples chosen. + /// Returns a set of indices corresponding to the samples chosen. /// /// public StratifiedIndexSampler(int seed) @@ -28,7 +28,7 @@ public StratifiedIndexSampler(int seed) /// /// Takes a stratified sample of size sampleSize with distributions equal to the input data. - /// Returns a set of indices corresponding to the samples chosen. + /// Returns a set of indices corresponding to the samples chosen. /// /// /// @@ -41,7 +41,7 @@ public int[] Sample(T[] data, int sampleSize) /// /// Takes a stratified sample of size sampleSize with distributions equal to the input data. 
/// http://en.wikipedia.org/wiki/Stratified_sampling - /// Returns a set of indices corresponding to the samples chosen. + /// Returns a set of indices corresponding to the samples chosen. /// Only samples within the indices provided in dataIndices /// /// diff --git a/src/SharpLearning.CrossValidation/TimeSeries/TimeSeriesCrossValidation.cs b/src/SharpLearning.CrossValidation/TimeSeries/TimeSeriesCrossValidation.cs index 5945f7f7..6eb4d35c 100644 --- a/src/SharpLearning.CrossValidation/TimeSeries/TimeSeriesCrossValidation.cs +++ b/src/SharpLearning.CrossValidation/TimeSeries/TimeSeriesCrossValidation.cs @@ -7,8 +7,8 @@ namespace SharpLearning.CrossValidation.TimeSeries; /// /// Time series cross-validation. Based on rolling validation using the original order of the data. -/// Using the specified initial size of the training set, a model is trained. -/// The model predicts the first observation following the training data. +/// Using the specified initial size of the training set, a model is trained. +/// The model predicts the first observation following the training data. /// Following, this data point is included in the training and a new model is trained, /// which predict the next observation. This continous until all observations following the initial training size, /// has been validated. @@ -24,9 +24,9 @@ public sealed class TimeSeriesCrossValidation /// Time series cross-validation. Based on rolling validation. /// /// The initial size of the training set. - /// The maximum size of the training set. Default is 0, which indicate no maximum size, - /// resulting in an expanding training interval. If a max is chosen, and the max size is reached, - /// this will result in a sliding training interval, moving forward in time, + /// The maximum size of the training set. Default is 0, which indicate no maximum size, + /// resulting in an expanding training interval. 
If a max is chosen, and the max size is reached, + /// this will result in a sliding training interval, moving forward in time, /// always using the data closest to the test period as training data. /// How often should the model be retrained. Default is 1, which will retrain the model at all time steps. /// Setting the interval to 5 will retrain the model at every fifth time step and use the current model for all time steps in between. @@ -63,8 +63,8 @@ public TimeSeriesCrossValidation(int initialTrainingSize, int maxTrainingSetSize /// /// Time series cross-validation. Based on rolling validation using the original order of the data. - /// Using the specified initial size of the training set, a model is trained. - /// The model predicts the first observation following the training data. + /// Using the specified initial size of the training set, a model is trained. + /// The model predicts the first observation following the training data. /// Following, this data point is included in the training and a new model is trained, /// which predict the next observation. This continuous until all observations following the initial training size, /// has been validated. 
diff --git a/src/SharpLearning.CrossValidation/TrainingTestSplitters/ITrainingTestIndexSplitter.cs b/src/SharpLearning.CrossValidation/TrainingTestSplitters/ITrainingTestIndexSplitter.cs index 53dd57ad..12df6128 100644 --- a/src/SharpLearning.CrossValidation/TrainingTestSplitters/ITrainingTestIndexSplitter.cs +++ b/src/SharpLearning.CrossValidation/TrainingTestSplitters/ITrainingTestIndexSplitter.cs @@ -8,7 +8,7 @@ namespace SharpLearning.CrossValidation.TrainingTestSplitters; public interface ITrainingTestIndexSplitter { /// - /// + /// /// /// /// diff --git a/src/SharpLearning.CrossValidation/TrainingTestSplitters/StratifiedTrainingTestIndexSplitter.cs b/src/SharpLearning.CrossValidation/TrainingTestSplitters/StratifiedTrainingTestIndexSplitter.cs index a056e510..e2683687 100644 --- a/src/SharpLearning.CrossValidation/TrainingTestSplitters/StratifiedTrainingTestIndexSplitter.cs +++ b/src/SharpLearning.CrossValidation/TrainingTestSplitters/StratifiedTrainingTestIndexSplitter.cs @@ -4,15 +4,15 @@ namespace SharpLearning.CrossValidation.TrainingTestSplitters; /// /// Creates a set of training and test indices based on the provided targets. -/// The indices are stratified before the split. This ensure that the distributions of training set and -/// test set are equal or at least very similar. +/// The indices are stratified before the split. This ensure that the distributions of training set and +/// test set are equal or at least very similar. /// /// public sealed class StratifiedTrainingTestIndexSplitter : TrainingTestIndexSplitter { /// - /// The indices are stratified before the split. This ensure that the distributions of training set and - /// test set are equal or at least very similar. + /// The indices are stratified before the split. This ensure that the distributions of training set and + /// test set are equal or at least very similar. 
/// /// /// diff --git a/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplit.cs b/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplit.cs index 0463bee1..1692ac2c 100644 --- a/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplit.cs +++ b/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplit.cs @@ -9,17 +9,17 @@ namespace SharpLearning.CrossValidation.TrainingTestSplitters; public sealed class TrainingTestIndexSplit : IEquatable { /// - /// + /// /// public readonly int[] TrainingIndices; /// - /// + /// /// public readonly int[] TestIndices; /// - /// + /// /// /// /// @@ -30,35 +30,28 @@ public TrainingTestIndexSplit(int[] trainingIndices, int[] testIndices) } /// - /// + /// /// /// /// public bool Equals(TrainingTestIndexSplit other) { if (!TrainingIndices.SequenceEqual(other.TrainingIndices)) { return false; } - if (!TestIndices.SequenceEqual(other.TestIndices)) { return false; } - - return true; + return TestIndices.SequenceEqual(other.TestIndices); } /// - /// + /// /// /// /// public override bool Equals(object obj) { - if (obj is TrainingTestIndexSplit other && Equals(other)) - { - return true; - } - - return false; + return obj is TrainingTestIndexSplit other && Equals(other); } /// - /// + /// /// /// public override int GetHashCode() diff --git a/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplitter.cs b/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplitter.cs index 5c904535..d61d8484 100644 --- a/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplitter.cs +++ b/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestIndexSplitter.cs @@ -15,7 +15,7 @@ public class TrainingTestIndexSplitter : ITrainingTestIndexSplitter readonly double m_trainingPercentage; /// - /// + /// /// /// the type of shuffler provided /// What percentage of the indices should go to the 
training set diff --git a/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestSetSplit.cs b/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestSetSplit.cs index eb62ffdb..cc713241 100644 --- a/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestSetSplit.cs +++ b/src/SharpLearning.CrossValidation/TrainingTestSplitters/TrainingTestSetSplit.cs @@ -10,12 +10,12 @@ namespace SharpLearning.CrossValidation.TrainingTestSplitters; public sealed class TrainingTestSetSplit : IEquatable { /// - /// + /// /// public readonly ObservationTargetSet TrainingSet; /// - /// + /// /// public readonly ObservationTargetSet TestSet; @@ -26,8 +26,8 @@ public sealed class TrainingTestSetSplit : IEquatable /// public TrainingTestSetSplit(ObservationTargetSet trainingSet, ObservationTargetSet testSet) { - TrainingSet = trainingSet ?? throw new ArgumentNullException("trainingSet"); - TestSet = testSet ?? throw new ArgumentNullException("testSet"); + TrainingSet = trainingSet ?? throw new ArgumentNullException(nameof(trainingSet)); + TestSet = testSet ?? 
throw new ArgumentNullException(nameof(testSet)); } /// @@ -45,35 +45,28 @@ public TrainingTestSetSplit(F64Matrix trainingObservations, double[] trainingTar } /// - /// + /// /// /// /// public bool Equals(TrainingTestSetSplit other) { if (!TrainingSet.Equals(other.TrainingSet)) { return false; } - if (!TestSet.Equals(other.TestSet)) { return false; } - - return true; + return TestSet.Equals(other.TestSet); } /// - /// + /// /// /// /// public override bool Equals(object obj) { - if (obj is TrainingTestSetSplit other && Equals(other)) - { - return true; - } - - return false; + return obj is TrainingTestSetSplit other && Equals(other); } /// - /// + /// /// /// public override int GetHashCode() diff --git a/src/SharpLearning.DecisionTrees.Test/DataSetUtilities.cs b/src/SharpLearning.DecisionTrees.Test/DataSetUtilities.cs index 518c11e3..81eedbe3 100644 --- a/src/SharpLearning.DecisionTrees.Test/DataSetUtilities.cs +++ b/src/SharpLearning.DecisionTrees.Test/DataSetUtilities.cs @@ -478,5 +478,4 @@ public static (F64Matrix observations, double[] targets) LoadDecisionTreeDataSet 1.52065;14.36;0;2.02;73.42;0;8.44;1.64;0;7 1.51651;14.38;0;1.94;73.61;0;8.48;1.57;0;7 1.51711;14.23;0;2.08;73.36;0;8.62;1.67;0;7"; - } diff --git a/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/GiniClassificationImpurityCalculatorTest.cs b/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/GiniClassificationImpurityCalculatorTest.cs index 0da58546..dc9bb54a 100644 --- a/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/GiniClassificationImpurityCalculatorTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/GiniClassificationImpurityCalculatorTest.cs @@ -11,7 +11,7 @@ public class GiniClassificationImpurityCalculatorTest [TestMethod] public void GiniClassificationImpurityCalculator_ImpurityImprovement() { - var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, }; + var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 }; var unique = values.Distinct().ToArray(); var parentInterval = Interval1D.Create(0, values.Length); @@ -33,7 +33,7 @@ public void GiniClassificationImpurityCalculator_ImpurityImprovement() [TestMethod] public void GiniClassificationImpurityCalculator_ImpurityImprovement_Weighted() { - var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, }; + var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 }; var unique = values.Distinct().ToArray(); var weights = values.Select(t => Weight(t)).ToArray(); @@ -56,7 +56,7 @@ public void GiniClassificationImpurityCalculator_ImpurityImprovement_Weighted() [TestMethod] public void GiniClassificationImpurityCalculator_ChildImpurities() { - var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, }; + var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 }; var unique = values.Distinct().ToArray(); var parentInterval = Interval1D.Create(0, values.Length); @@ -76,7 +76,7 @@ public void GiniClassificationImpurityCalculator_ChildImpurities() [TestMethod] public void GiniClassificationImpurityCalculator_NodeImpurity() { - var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, }; + var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 }; var unique = values.Distinct().ToArray(); var parentInterval = Interval1D.Create(0, values.Length); @@ -93,7 +93,7 @@ public void GiniClassificationImpurityCalculator_NodeImpurity() [TestMethod] public void GiniClassificationImpurityCalculator_LeafValue_Weighted() { - var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, }; + var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 }; var unique = values.Distinct().ToArray(); var weights = values.Select(t => Weight(t)).ToArray(); var parentInterval = Interval1D.Create(0, values.Length); @@ -111,8 +111,6 @@ public void 
GiniClassificationImpurityCalculator_LeafValue_Weighted() static double Weight(double t) { - if (t == 2.0) - return 10.0; - return 1.0; + return t == 2.0 ? 10.0 : 1.0; } } diff --git a/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/RegressionImpurityCalculatorTest.cs b/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/RegressionImpurityCalculatorTest.cs index 112ee89f..a93dc3a7 100644 --- a/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/RegressionImpurityCalculatorTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/ImpurityCalculators/RegressionImpurityCalculatorTest.cs @@ -11,7 +11,7 @@ public class RegressionImpurityCalculatorTest [TestMethod] public void RegressionImpurityCalculator_ImpurityImprovement() { - var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, }; + var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 }; var parentInterval = Interval1D.Create(0, values.Length); @@ -31,7 +31,7 @@ public void RegressionImpurityCalculator_ImpurityImprovement() [TestMethod] public void RegressionImpurityCalculator_ImpurityImprovement_Weighted() { - var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, }; + var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 }; var weights = values.Select(t => Weight(t)).ToArray(); var parentInterval = Interval1D.Create(0, values.Length); @@ -52,7 +52,7 @@ public void RegressionImpurityCalculator_ImpurityImprovement_Weighted() [TestMethod] public void RegressionImpurityCalculator_ChildImpurities() { - var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, }; + var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 
2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 }; var parentInterval = Interval1D.Create(0, values.Length); @@ -70,7 +70,7 @@ public void RegressionImpurityCalculator_ChildImpurities() [TestMethod] public void RegressionImpurityCalculator_NodeImpurity() { - var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, }; + var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 }; var parentInterval = Interval1D.Create(0, values.Length); @@ -86,7 +86,7 @@ public void RegressionImpurityCalculator_NodeImpurity() [TestMethod] public void RegressionImpurityCalculator_LeafValue_Weighted() { - var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 
2, 2, }; + var values = new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 }; var weights = values.Select(t => Weight(t)).ToArray(); var parentInterval = Interval1D.Create(0, values.Length); @@ -103,8 +103,6 @@ public void RegressionImpurityCalculator_LeafValue_Weighted() static double Weight(double t) { - if (t == 2.0) - return 10.0; - return 1.0; + return t == 2.0 ? 10.0 : 1.0; } } diff --git a/src/SharpLearning.DecisionTrees.Test/Learners/ClassificationDecisionTreeLearnerTest.cs b/src/SharpLearning.DecisionTrees.Test/Learners/ClassificationDecisionTreeLearnerTest.cs index aab40b44..97a5c95d 100644 --- a/src/SharpLearning.DecisionTrees.Test/Learners/ClassificationDecisionTreeLearnerTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/Learners/ClassificationDecisionTreeLearnerTest.cs @@ -207,8 +207,6 @@ static double ClassificationDecisionTreeLearner_Learn_Aptitude_Weighted(int tree public static double Weight(double v, double targetToWeigh, double weight) { - if (v == targetToWeigh) - return weight; - return 1.0; + return v == targetToWeigh ? 
weight : 1.0; } } diff --git a/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs b/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs index 78a4a3a1..6ffdee49 100644 --- a/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/Learners/RegressionDecisionTreeLearnerTest.cs @@ -114,8 +114,6 @@ static double RegressionDecisionTreeLearner_Learn_Weighted(int treeDepth, double static double Weight(double v, double weight) { - if (v < 3.0) - return weight; - return 1.0; + return v < 3.0 ? weight : 1.0; } } diff --git a/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs b/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs index 06e30e63..4d8106a3 100644 --- a/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/Models/ClassificationDecisionTreeModelTest.cs @@ -71,7 +71,6 @@ public void ClassificationDecisionTreeModel_Predict_Multiple_Indexed() Assert.AreEqual(0.1, error, 0.0000001); } - [TestMethod] public void ClassificationDecisionTreeModel_PredictProbability_Single() { @@ -92,7 +91,7 @@ public void ClassificationDecisionTreeModel_PredictProbability_Single() Assert.AreEqual(0.23076923076923078, error, 0.0000001); - var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new 
Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 } }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 } }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 } }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 } }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 } }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 } }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 } }), new(0, new Dictionary { { 0, 0.75 }, { 1, 
0.25 } }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 } }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 } }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 } }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 } }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 } }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 } }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 } }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 } }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 } }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 } }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 } }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 } }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 } }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 } }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 } }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 } }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 } }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 } }) }; CollectionAssert.AreEqual(expected, actual); } @@ -110,7 +109,7 @@ public void ClassificationDecisionTreeModel_PredictProbability_Multiple() Assert.AreEqual(0.23076923076923078, error, 0.0000001); - var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { 
{ 0, 0.75 }, { 1, 0.25 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 } }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 } }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 } }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 } }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 } }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 } }), 
new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 } }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 } }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 } }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 } }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 } }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 } }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 } }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 } }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 } }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 } }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 } }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 } }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 } }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 } }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 } }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 } }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 } }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 } }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 } }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 } }) }; CollectionAssert.AreEqual(expected, actual); } @@ -131,7 +130,7 @@ public void ClassificationDecisionTreeModel_PredictProbability_Multiple_Indexed( Assert.AreEqual(0.1, error, 0.0000001); - var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 }, }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(1, new 
Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 }, }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 } }), new(0, new Dictionary { { 0, 0.571428571428571 }, { 1, 0.428571428571429 } }), new(1, new Dictionary { { 0, 0.428571428571429 }, { 1, 0.571428571428571 } }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 } }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 } }), new(0, new Dictionary { { 0, 0.75 }, { 1, 0.25 } }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 } }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 } }), new(0, new Dictionary { { 0, 0.857142857142857 }, { 1, 0.142857142857143 } }), new(1, new Dictionary { { 0, 0.285714285714286 }, { 1, 0.714285714285714 } }) }; CollectionAssert.AreEqual(expected, actual); } diff --git a/src/SharpLearning.DecisionTrees.Test/SplitSearchers/LinearSplitSearcherTest.cs b/src/SharpLearning.DecisionTrees.Test/SplitSearchers/LinearSplitSearcherTest.cs index ed716db0..8ae7fc5c 100644 --- a/src/SharpLearning.DecisionTrees.Test/SplitSearchers/LinearSplitSearcherTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/SplitSearchers/LinearSplitSearcherTest.cs @@ -71,7 +71,6 @@ public void LinearSplitSearcher_FindBestSplit_Weight() Assert.AreEqual(expected, actual); } - [TestMethod] public void LinearSplitSearcher_FindBestSplit_DecisionTreeData() { @@ -100,8 +99,8 @@ public void LinearSplitSearcher_FindBestSplit_DecisionTreeData() [TestMethod] public void LinearSplitSearcher_FindBestSplit_Large() { 
- var feature = new double[] { 0, 0.693147180559945, 0, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.09861228866811, 0, 0, 0, 1.09861228866811, 0, 0, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 2.07944154167984, 0, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 1.6094379124341, 0, 1.79175946922805, 1.38629436111989, 1.6094379124341, 0, 0, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 0, 0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 0, 2.19722457733622, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.79175946922805, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0, 0, 0, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 
0.693147180559945, 0, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 0, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0, 0, 1.6094379124341, 1.38629436111989, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.94591014905531, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0, 2.56494935746154, 0.693147180559945, 0, 0, 0, 0, 0, 1.6094379124341, 0, 0, 0, 0, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0, 2.56494935746154, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 2.07944154167984, 2.30258509299405, 1.38629436111989, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0, 0, 0, 0, 0, 
1.09861228866811, 1.09861228866811, 0, 2.19722457733622, 2.30258509299405, 2.70805020110221, 2.07944154167984, 1.94591014905531, 2.39789527279837, 2.30258509299405, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 2.77258872223978, 2.19722457733622, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.09861228866811, 2.19722457733622, 2.30258509299405, 1.79175946922805, 2.07944154167984, 1.94591014905531, 1.38629436111989, 2.19722457733622, 3.04452243772342, 2.07944154167984, 2.19722457733622, 2.07944154167984, 2.484906649788, 0.693147180559945, 1.94591014905531, 1.6094379124341, 2.56494935746154, 1.38629436111989, 2.63905732961526, 1.94591014905531, 2.19722457733622, 1.94591014905531, 1.09861228866811, 1.38629436111989, 1.79175946922805, 2.99573227355399, 0, 2.484906649788, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.19722457733622, 2.56494935746154, 1.79175946922805, 2.83321334405622, 0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.79175946922805, 2.30258509299405, 2.56494935746154, 2.39789527279837, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.94591014905531, 2.77258872223978, 2.484906649788, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 2.484906649788, 0, 0.693147180559945, 0.693147180559945, 2.30258509299405, 1.94591014905531, 2.56494935746154, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.09861228866811, 0, 1.09861228866811, 2.19722457733622, 2.07944154167984, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.94591014905531, 1.6094379124341, 0.693147180559945, 2.77258872223978, 2.484906649788, 1.38629436111989, 2.39789527279837, 2.39789527279837, 2.19722457733622, 0.693147180559945, 1.6094379124341, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.19722457733622, 2.484906649788, 1.38629436111989, 2.07944154167984, 
1.38629436111989, 1.38629436111989, 2.89037175789616, 1.6094379124341, 2.56494935746154, 1.38629436111989, 2.19722457733622, 1.38629436111989, 2.30258509299405, 2.07944154167984, 1.79175946922805, 2.56494935746154, 1.38629436111989, 2.77258872223978, 1.79175946922805, 2.39789527279837, 1.79175946922805, 1.09861228866811, 1.94591014905531, 1.79175946922805, 2.19722457733622, 1.6094379124341, 2.56494935746154, 1.38629436111989, 0.693147180559945, 2.07944154167984, 1.94591014905531, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.39789527279837, 2.39789527279837, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0, 1.79175946922805, 2.83321334405622, 1.79175946922805, 1.79175946922805, 0, 2.30258509299405, 1.38629436111989, 2.77258872223978, 1.94591014905531, 1.09861228866811, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.6094379124341, 2.30258509299405, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.79175946922805, 2.07944154167984, 2.39789527279837, 1.79175946922805, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 1.79175946922805, 2.30258509299405, 1.94591014905531, 1.09861228866811, 3.2188758248682, 1.94591014905531, 1.79175946922805, 0, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.94591014905531, 1.38629436111989, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.09861228866811, 2.56494935746154, 1.6094379124341, 1.09861228866811, 1.94591014905531, 2.07944154167984, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.83321334405622, 1.94591014905531, 2.39789527279837, 2.30258509299405, 2.39789527279837, 1.94591014905531, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.09861228866811, 0.693147180559945, 1.6094379124341, 2.63905732961526, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.94591014905531, 2.30258509299405, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.99573227355399, 2.07944154167984, 2.77258872223978, 1.6094379124341, 0, 
2.07944154167984, 1.09861228866811, 1.79175946922805, 2.39789527279837, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.19722457733622, 2.30258509299405, 2.484906649788, 1.94591014905531, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 2.63905732961526, 1.6094379124341, 1.94591014905531, 1.6094379124341, 2.63905732961526, 2.56494935746154, 1.94591014905531, 2.484906649788, 1.79175946922805, 2.07944154167984, 2.07944154167984, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.94591014905531, 0, 1.38629436111989, 2.484906649788, 2.39789527279837, 1.94591014905531, 2.39789527279837, 2.07944154167984, 1.6094379124341, 2.63905732961526, 1.79175946922805, 2.63905732961526, 2.39789527279837, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 1.79175946922805, 1.79175946922805, 2.63905732961526, 2.19722457733622, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.39789527279837, 1.38629436111989, 2.19722457733622, 1.79175946922805, 2.19722457733622, 1.09861228866811, 1.09861228866811, 2.19722457733622, 2.39789527279837, 1.09861228866811, 1.38629436111989, 1.79175946922805, 2.63905732961526, 0, 1.6094379124341, 1.79175946922805, 0, 2.63905732961526, 2.07944154167984, 1.38629436111989, 1.79175946922805, 2.63905732961526, 0.693147180559945, 1.79175946922805, 1.6094379124341, 2.19722457733622, 0, 2.39789527279837, 0.693147180559945, 1.79175946922805, 2.39789527279837, 2.63905732961526, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.70805020110221, 1.09861228866811, 1.79175946922805, 1.09861228866811, 2.56494935746154, 2.19722457733622, 1.6094379124341, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 2.56494935746154, 1.38629436111989, 1.38629436111989, 2.484906649788, 2.30258509299405, 2.30258509299405, 0, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.30258509299405, 1.94591014905531, 1.79175946922805, 0, 2.77258872223978, 
2.56494935746154, 2.30258509299405, 2.19722457733622, 2.30258509299405, 1.6094379124341, 2.30258509299405, 1.79175946922805, 2.19722457733622, 0.693147180559945, 1.79175946922805, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.09861228866811, 2.07944154167984, 1.09861228866811, 2.30258509299405, 0.693147180559945, 2.83321334405622, 2.83321334405622, 2.83321334405622, 2.07944154167984, 2.77258872223978, 2.30258509299405, 1.38629436111989, 0.693147180559945, 2.56494935746154, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.79175946922805, 3.09104245335832, 2.30258509299405, 2.56494935746154, 1.38629436111989, 2.07944154167984, 2.19722457733622, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.89037175789616, 2.19722457733622, 1.79175946922805, 2.07944154167984, 1.09861228866811, 0, 0, 0, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.70805020110221, 2.83321334405622, 1.79175946922805, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 2.39789527279837, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.07944154167984, 2.56494935746154, 2.07944154167984, 1.38629436111989, 2.70805020110221, 1.6094379124341, 2.484906649788, 1.79175946922805, 1.94591014905531, 2.70805020110221, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 0, 0, 1.38629436111989, 2.63905732961526, 1.79175946922805, 0.693147180559945, 1.94591014905531, 1.79175946922805, 2.07944154167984, 2.56494935746154, 0.693147180559945, 2.70805020110221, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.6094379124341, 0, 1.38629436111989, 2.30258509299405, 1.79175946922805, 0, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.484906649788, 
2.30258509299405, 1.6094379124341, 1.79175946922805, 3.17805383034795, 2.70805020110221, 1.6094379124341, 2.19722457733622, 0.693147180559945, 2.70805020110221, 2.07944154167984, 2.63905732961526, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.56494935746154, 2.07944154167984, 1.94591014905531, 2.39789527279837, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0, 1.09861228866811, 2.484906649788, 2.19722457733622, 0, 1.94591014905531, 2.07944154167984, 0, 0, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.38629436111989, 2.39789527279837, 2.56494935746154, 0.693147180559945, 2.30258509299405, 1.94591014905531, 2.07944154167984, 2.30258509299405, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.6094379124341, 0.693147180559945, 0.693147180559945, 2.83321334405622, 2.70805020110221, 0.693147180559945, 1.94591014905531, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0, 2.484906649788, 1.38629436111989, 1.79175946922805, 2.484906649788, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.77258872223978, 2.63905732961526, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.89037175789616, 0, 0, 2.39789527279837, 1.38629436111989, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.39789527279837, 2.56494935746154, 1.6094379124341, 2.56494935746154, 2.63905732961526, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.6094379124341, 0, 0, 0, 0, 0.693147180559945, 1.94591014905531, 2.39789527279837, 0.693147180559945, 1.6094379124341, 1.94591014905531, 2.56494935746154, 2.39789527279837, 2.63905732961526, 0, 1.09861228866811, 0, 0, 
2.07944154167984, 1.79175946922805, 2.63905732961526, 0.693147180559945, 1.09861228866811, 2.39789527279837, 0.693147180559945, 2.63905732961526, 2.07944154167984, 1.79175946922805, 1.38629436111989, 0, 1.79175946922805, 2.19722457733622, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.6094379124341, 2.39789527279837, 2.30258509299405, 1.09861228866811, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.6094379124341, 0.693147180559945, 2.30258509299405, 0, 1.94591014905531, 2.30258509299405, 1.94591014905531, 0.693147180559945, 2.77258872223978, 0, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 2.77258872223978, 2.63905732961526, 2.63905732961526, 1.6094379124341, 2.19722457733622, 2.484906649788, 2.484906649788, 1.79175946922805, 0.693147180559945, 2.484906649788, 2.30258509299405, 2.39789527279837, 2.484906649788, 1.94591014905531, 2.30258509299405, 1.6094379124341, 1.94591014905531, 1.09861228866811, 2.56494935746154, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 1.79175946922805, 0, 0, 0, 1.79175946922805, 0, 1.6094379124341, 0, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0, 2.56494935746154, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.484906649788, 0, 1.38629436111989, 1.94591014905531, 2.19722457733622, 2.19722457733622, 1.79175946922805, 1.79175946922805, 2.484906649788, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.94591014905531, 2.19722457733622, 1.94591014905531, 2.19722457733622, 3.04452243772342, 0.693147180559945, 2.30258509299405, 2.56494935746154, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.09861228866811, 2.94443897916644, 1.79175946922805, 2.83321334405622, 2.07944154167984, 1.38629436111989, 1.38629436111989, 2.19722457733622, 2.30258509299405, 2.30258509299405, 1.94591014905531, 1.94591014905531, 2.99573227355399, 
2.30258509299405, 2.30258509299405, 2.30258509299405, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.39789527279837, 2.83321334405622, 1.09861228866811, 0, 2.30258509299405, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.30258509299405, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.94591014905531, 2.39789527279837, 1.94591014905531, 3.04452243772342, 1.94591014905531, 2.484906649788, 0.693147180559945, 2.07944154167984, 1.6094379124341, 2.77258872223978, 0, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0, 2.63905732961526, 1.38629436111989, 0, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.94591014905531, 2.484906649788, 1.79175946922805, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.99573227355399, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.07944154167984, 2.39789527279837, 0.693147180559945, 2.19722457733622, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 3.04452243772342, 1.94591014905531, 0.693147180559945, 2.19722457733622, 0, 0, 0.693147180559945, 0, 2.56494935746154, 0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.39789527279837, 1.38629436111989, 2.07944154167984, 2.77258872223978, 1.6094379124341, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.09861228866811, 2.30258509299405, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.6094379124341, 2.39789527279837, 2.07944154167984, 1.79175946922805, 2.63905732961526, 1.6094379124341, 2.94443897916644, 1.38629436111989, 2.19722457733622, 2.484906649788, 1.09861228866811, 1.38629436111989, 
1.38629436111989, 2.89037175789616, 1.94591014905531, 1.09861228866811, 1.09861228866811, 2.94443897916644, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0.693147180559945, 2.39789527279837, 1.38629436111989, 2.94443897916644, 1.38629436111989, 1.38629436111989, 1.6094379124341, 2.56494935746154, 3.04452243772342, 2.39789527279837, 2.30258509299405, 1.94591014905531, 2.19722457733622, 1.38629436111989, 1.38629436111989, 0, 1.38629436111989, 1.6094379124341, 2.07944154167984, 2.07944154167984, 2.30258509299405, 1.38629436111989, 1.09861228866811, 2.77258872223978, 1.38629436111989, 3.04452243772342, 2.39789527279837, 1.38629436111989, 0.693147180559945, 1.94591014905531, 2.30258509299405, 2.63905732961526, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.94591014905531, 1.38629436111989, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.6094379124341, 1.6094379124341, 0.693147180559945, 2.77258872223978, 0, 2.19722457733622, 2.484906649788, 1.09861228866811, 2.56494935746154, 0.693147180559945, 1.6094379124341, 2.39789527279837, 0.693147180559945, 2.70805020110221, 1.09861228866811, 1.6094379124341, 2.30258509299405, 2.07944154167984, 0.693147180559945, 2.83321334405622, 1.94591014905531, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 1.94591014905531, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.6094379124341, 2.19722457733622, 2.63905732961526, 1.6094379124341, 2.19722457733622, 1.94591014905531, 2.484906649788, 2.70805020110221, 1.09861228866811, 2.07944154167984, 2.39789527279837, 2.484906649788, 2.39789527279837, 1.38629436111989, 2.19722457733622, 2.07944154167984, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.83321334405622, 1.09861228866811, 2.94443897916644, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.6094379124341, 3.09104245335832, 1.79175946922805, 0.693147180559945, 
1.09861228866811, 2.94443897916644, 0.693147180559945, 0.693147180559945, 2.39789527279837, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.94591014905531, 0.693147180559945, 2.07944154167984, 1.94591014905531, 0, 2.07944154167984, 1.6094379124341, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.94591014905531, 0, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.07944154167984, 2.07944154167984, 0, 2.07944154167984, 2.07944154167984, 1.79175946922805, 2.30258509299405, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.07944154167984, 2.83321334405622, 2.07944154167984, 2.07944154167984, 2.484906649788, 1.6094379124341, 1.09861228866811, 2.30258509299405, 2.30258509299405, 2.70805020110221, 2.19722457733622, 1.79175946922805, 2.19722457733622, 1.94591014905531, 2.39789527279837, 2.39789527279837, 2.07944154167984, 1.79175946922805, 2.63905732961526, 1.6094379124341, 2.30258509299405, 1.6094379124341, 0.693147180559945, 2.30258509299405, 2.30258509299405, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.07944154167984, 2.63905732961526, 2.07944154167984, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.07944154167984, 0.693147180559945, 1.38629436111989, 1.09861228866811, 2.77258872223978, 2.39789527279837, 1.6094379124341, 0.693147180559945, 1.6094379124341, 0, 2.99573227355399, 0.693147180559945, 0.693147180559945, 1.79175946922805, 2.484906649788, 1.38629436111989, 0, 2.07944154167984, 0, 1.09861228866811, 1.09861228866811, 2.19722457733622, 1.94591014905531, 1.94591014905531, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.94591014905531, 2.30258509299405, 2.07944154167984, 1.6094379124341, 1.94591014905531, 2.07944154167984, 2.07944154167984, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.484906649788, 1.09861228866811, 1.6094379124341, 1.94591014905531, 2.70805020110221, 
1.09861228866811, 2.63905732961526, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 2.19722457733622, 2.07944154167984, 1.09861228866811, 1.94591014905531, 2.39789527279837, 2.99573227355399, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.70805020110221, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 2.63905732961526, 2.63905732961526, 1.94591014905531, 1.79175946922805, 2.07944154167984, 1.79175946922805, 2.07944154167984, 2.484906649788, 1.94591014905531, 1.6094379124341, 2.07944154167984, 1.38629436111989, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.09861228866811, 1.6094379124341, 2.94443897916644, 1.79175946922805, 0, 0.693147180559945, 0, 2.19722457733622, 0.693147180559945, 1.94591014905531, 2.07944154167984, 1.79175946922805, 1.38629436111989, 2.19722457733622, 1.09861228866811, 1.94591014905531, 2.19722457733622, 1.09861228866811, 2.07944154167984, 1.6094379124341, 2.19722457733622, 1.94591014905531, 0.693147180559945, 1.94591014905531, 2.19722457733622, 3.04452243772342, 1.6094379124341, 2.39789527279837, 0, 2.77258872223978, 1.6094379124341, 2.30258509299405, 1.38629436111989, 1.6094379124341, 2.07944154167984, 2.30258509299405, 1.38629436111989, 0.693147180559945, 2.39789527279837, 0, 1.94591014905531, 2.19722457733622, 1.09861228866811, 1.79175946922805, 2.07944154167984, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0, 2.19722457733622, 1.09861228866811, 2.30258509299405, 2.07944154167984, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.19722457733622, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.6094379124341, 2.30258509299405, 2.07944154167984, 1.09861228866811, 
2.70805020110221, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 2.07944154167984, 1.79175946922805, 2.39789527279837, 1.94591014905531, 1.79175946922805, 2.39789527279837, 1.6094379124341, 2.39789527279837, 2.07944154167984, 1.09861228866811, 1.6094379124341, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.09861228866811, 2.19722457733622, 1.6094379124341, 2.07944154167984, 0, 1.6094379124341, 2.89037175789616, 1.38629436111989, 2.99573227355399, 2.30258509299405, 1.94591014905531, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.94591014905531, 2.30258509299405, 2.56494935746154, 2.77258872223978, 0.693147180559945, 2.99573227355399, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.6094379124341, 2.63905732961526, 0, 3.36729582998647, 1.6094379124341, 2.07944154167984, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.63905732961526, 0.693147180559945, 1.94591014905531, 2.56494935746154, 1.79175946922805, 2.30258509299405, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.94591014905531, 2.19722457733622, 2.70805020110221, 2.07944154167984, 2.70805020110221, 0, 1.38629436111989, 2.07944154167984, 1.79175946922805, 2.19722457733622, 1.94591014905531, 2.30258509299405, 2.30258509299405, 0.693147180559945, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.6094379124341, 2.07944154167984, 2.30258509299405, 2.07944154167984, 2.77258872223978, 1.79175946922805, 1.09861228866811, 1.79175946922805, 2.56494935746154, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.79175946922805, 2.30258509299405, 2.07944154167984, 2.07944154167984, 1.94591014905531, 0, 2.19722457733622, 1.79175946922805, 1.79175946922805, 1.94591014905531, 2.39789527279837, 2.30258509299405, 1.6094379124341, 2.484906649788, 2.39789527279837, 1.79175946922805, 
2.77258872223978, 2.56494935746154, 1.09861228866811, 1.38629436111989, 2.484906649788, 2.19722457733622, 1.6094379124341, 1.09861228866811, 2.39789527279837, 1.09861228866811, 2.39789527279837, 1.94591014905531, 3.29583686600433, 1.38629436111989, 2.56494935746154, 0.693147180559945, 1.38629436111989, 2.19722457733622, 2.56494935746154, 2.19722457733622, 1.6094379124341, 2.19722457733622, 1.38629436111989, 1.94591014905531, 2.77258872223978, 1.38629436111989, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.30258509299405, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.09861228866811, 2.07944154167984, 2.70805020110221, 2.07944154167984, 1.79175946922805, 2.19722457733622, 2.63905732961526, 0.693147180559945, 2.19722457733622, 0.693147180559945, 2.19722457733622, 0.693147180559945, 2.07944154167984, 1.94591014905531, 2.70805020110221, 1.6094379124341, 1.6094379124341, 1.79175946922805, 2.39789527279837, 2.70805020110221, 1.09861228866811, 2.484906649788, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 1.6094379124341, 0, 2.63905732961526, 2.30258509299405, 1.09861228866811, 1.94591014905531, 0, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.79175946922805, 1.6094379124341, 2.07944154167984, 1.38629436111989, 2.39789527279837, 1.94591014905531, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.79175946922805, 1.09861228866811, 2.70805020110221, 2.484906649788, 1.94591014905531, 1.6094379124341, 1.38629436111989, 2.19722457733622, 1.6094379124341, 2.77258872223978, 2.77258872223978, 2.39789527279837, 2.39789527279837, 0.693147180559945, 2.484906649788, 2.484906649788, 1.6094379124341, 2.39789527279837, 1.09861228866811, 0.693147180559945, 2.19722457733622, 2.39789527279837, 2.484906649788, 2.19722457733622, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.07944154167984, 2.30258509299405, 2.30258509299405, 2.19722457733622, 0.693147180559945, 1.6094379124341, 
1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 3.09104245335832, 2.70805020110221, 2.19722457733622, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.56494935746154, 1.09861228866811, 1.94591014905531, 2.70805020110221, 2.484906649788, 1.38629436111989, 2.19722457733622, 1.6094379124341, 2.484906649788, 1.38629436111989, 2.30258509299405, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.79175946922805, 2.19722457733622, 1.38629436111989, 0.693147180559945, 2.39789527279837, 0, 2.07944154167984, 2.94443897916644, 2.89037175789616, 2.30258509299405, 1.38629436111989, 1.6094379124341, 1.6094379124341, 2.39789527279837, 2.63905732961526, 2.484906649788, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.6094379124341, 2.63905732961526, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 2.19722457733622, 1.94591014905531, 1.6094379124341, 2.56494935746154, 0.693147180559945, 2.30258509299405, 0.693147180559945, 2.56494935746154, 2.56494935746154, 2.30258509299405, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.77258872223978, 0, 0.693147180559945, 2.07944154167984, 2.30258509299405, 2.07944154167984, 1.09861228866811, 2.30258509299405, 2.77258872223978, 2.07944154167984, 1.79175946922805, 1.38629436111989, 1.94591014905531, 2.89037175789616, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.6094379124341, 2.70805020110221, 1.38629436111989, 2.39789527279837, 2.30258509299405, 1.79175946922805, 1.6094379124341, 2.07944154167984, 2.77258872223978, 1.79175946922805, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.56494935746154, 2.89037175789616, 1.09861228866811, 2.63905732961526, 2.56494935746154, 1.09861228866811, 2.30258509299405, 1.94591014905531, 
2.39789527279837, 2.07944154167984, 1.79175946922805, 2.63905732961526, 3.29583686600433, 1.6094379124341, 2.19722457733622, 2.30258509299405, 1.38629436111989, 1.38629436111989, 2.484906649788, 2.07944154167984, 1.79175946922805, 1.09861228866811, 2.07944154167984, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.83321334405622, 1.09861228866811, 1.6094379124341, 2.07944154167984, 2.39789527279837, 2.30258509299405, 0, 1.94591014905531, 0.693147180559945, 2.484906649788, 2.39789527279837, 0, 0, 2.07944154167984, 1.09861228866811, 2.07944154167984, 2.63905732961526, 2.484906649788, 2.07944154167984, 1.38629436111989, 2.30258509299405, 2.07944154167984, 2.30258509299405, 3.09104245335832, 2.63905732961526, 1.09861228866811, 0, 2.94443897916644, 1.79175946922805, 1.94591014905531, 2.30258509299405, 2.30258509299405, 2.07944154167984, 2.39789527279837, 2.484906649788, 1.6094379124341, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0.693147180559945, 1.38629436111989, 2.19722457733622, 0, 2.19722457733622, 2.484906649788, 0.693147180559945, 1.79175946922805, 0.693147180559945, 2.30258509299405, 1.38629436111989, 1.94591014905531, 0, 0, 1.79175946922805, 0.693147180559945, 1.79175946922805, 0, 1.94591014905531, 1.6094379124341, 2.56494935746154, 2.07944154167984, 2.07944154167984, 0, 1.94591014905531, 2.39789527279837, 1.38629436111989, 1.38629436111989, 2.39789527279837, 2.39789527279837, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 2.07944154167984, 1.38629436111989, 2.77258872223978, 1.38629436111989, 2.39789527279837, 2.70805020110221, 1.38629436111989, 1.94591014905531, 1.94591014905531, 1.6094379124341, 2.19722457733622, 2.19722457733622, 2.07944154167984, 1.38629436111989, 0.693147180559945, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.70805020110221, 2.39789527279837, 2.63905732961526, 2.19722457733622, 1.09861228866811, 1.6094379124341, 1.94591014905531, 
1.09861228866811, 2.19722457733622, 2.63905732961526, 2.56494935746154, 2.39789527279837, 0.693147180559945, 2.07944154167984, 2.19722457733622, 1.94591014905531, 2.07944154167984, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 2.63905732961526, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.94591014905531, 2.70805020110221, 0, 1.38629436111989, 2.77258872223978, 1.79175946922805, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.09861228866811, 1.79175946922805, 1.09861228866811, 2.30258509299405, 2.89037175789616, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.09861228866811, 2.39789527279837, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.09861228866811, 2.70805020110221, 1.94591014905531, 2.07944154167984, 1.6094379124341, 0, 1.79175946922805, 2.19722457733622, 1.94591014905531, 0.693147180559945, 0, 2.30258509299405, 2.30258509299405, 2.07944154167984, 1.79175946922805, 2.70805020110221, 1.38629436111989, 2.63905732961526, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.19722457733622, 2.19722457733622, 0, 1.38629436111989, 1.79175946922805, 2.19722457733622, 0.693147180559945, 1.94591014905531, 1.79175946922805, 2.19722457733622, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.94591014905531, 0, 0, 2.89037175789616, 0, 0, 1.6094379124341, 3.09104245335832, 1.38629436111989, 2.30258509299405, 2.63905732961526, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0, 0, 1.09861228866811, 2.39789527279837, 0.693147180559945, 0, 1.94591014905531, 2.39789527279837, 1.38629436111989, 1.6094379124341, 2.07944154167984, 
0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 2.70805020110221, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.6094379124341, 0, 2.77258872223978, 2.99573227355399, 0, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.94591014905531, 1.09861228866811, 1.79175946922805, 0.693147180559945, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.19722457733622, 0, 1.09861228866811, 1.6094379124341, 1.6094379124341, 0, 2.83321334405622, 2.39789527279837, 1.79175946922805, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.38629436111989, 2.63905732961526, 2.07944154167984, 0.693147180559945, 0, 1.94591014905531, 2.39789527279837, 1.38629436111989, 0, 1.94591014905531, 2.07944154167984, 2.07944154167984, 1.6094379124341, 2.19722457733622, 2.39789527279837, 1.79175946922805, 0, 1.79175946922805, 2.484906649788, 2.63905732961526, 2.19722457733622, 2.07944154167984, 2.07944154167984, 2.30258509299405, 2.77258872223978, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.09861228866811, 2.19722457733622, 2.56494935746154, 1.79175946922805, 0.693147180559945, 1.94591014905531, 0, 0.693147180559945, 2.99573227355399, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.07944154167984, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 2.07944154167984, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 0, 1.38629436111989, 2.63905732961526, 2.07944154167984, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.6094379124341, 0, 2.30258509299405, 1.38629436111989, 0, 0, 0, 1.94591014905531, 1.6094379124341, 2.56494935746154, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.07944154167984, 1.6094379124341, 1.09861228866811, 
0.693147180559945, 0.693147180559945, 0, 0, 2.30258509299405, 1.94591014905531, 0, 2.484906649788, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.07944154167984, 2.19722457733622, 1.09861228866811, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.79175946922805, 3.09104245335832, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.94591014905531, 1.6094379124341, 2.63905732961526, 1.38629436111989, 2.484906649788, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.56494935746154, 1.94591014905531, 2.94443897916644, 2.39789527279837, 1.6094379124341, 1.38629436111989, 0.693147180559945, 2.83321334405622, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.79175946922805, 2.30258509299405, 1.79175946922805, 1.94591014905531, 2.07944154167984, 2.30258509299405, 1.94591014905531, 2.19722457733622, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 2.07944154167984, 2.19722457733622, 2.39789527279837, 0.693147180559945, 2.70805020110221, 1.94591014905531, 2.484906649788, 2.30258509299405, 1.94591014905531, 2.30258509299405, 1.94591014905531, 2.484906649788, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 2.83321334405622, 2.19722457733622, 2.07944154167984, 1.6094379124341, 2.07944154167984, 2.19722457733622, 1.94591014905531, 2.19722457733622, 2.89037175789616, 2.07944154167984, 2.19722457733622, 2.77258872223978, 2.484906649788, 1.6094379124341, 2.07944154167984, 2.56494935746154, 1.38629436111989, 2.30258509299405, 1.94591014905531, 2.56494935746154, 1.6094379124341, 1.38629436111989, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.19722457733622, 2.19722457733622, 1.09861228866811, 2.19722457733622, 2.19722457733622, 1.09861228866811, 2.19722457733622, 2.39789527279837, 
1.6094379124341, 2.07944154167984, 2.77258872223978, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.38629436111989, 2.70805020110221, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0.693147180559945, 2.70805020110221, 1.94591014905531, 1.79175946922805, 2.19722457733622, 2.83321334405622, 1.09861228866811, 1.79175946922805, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.56494935746154, 2.39789527279837, 2.19722457733622, 1.6094379124341, 2.30258509299405, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.6094379124341, 2.07944154167984, 1.79175946922805, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.6094379124341, 2.484906649788, 1.79175946922805, 0, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.30258509299405, 1.94591014905531, 0.693147180559945, 0.693147180559945, 2.39789527279837, 2.07944154167984, 1.79175946922805, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 2.19722457733622, 2.30258509299405, 1.79175946922805, 1.79175946922805, 1.6094379124341, 3.17805383034795, 1.94591014905531, 0.693147180559945, 1.94591014905531, 2.19722457733622, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.19722457733622, 1.94591014905531, 2.07944154167984, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.94591014905531, 2.07944154167984, 1.09861228866811, 2.07944154167984, 2.77258872223978, 2.30258509299405, 2.19722457733622, 0.693147180559945, 2.30258509299405, 3.2188758248682, 1.6094379124341, 1.94591014905531, 1.94591014905531, 1.79175946922805, 2.63905732961526, 2.83321334405622, 0, 1.09861228866811, 2.39789527279837, 1.79175946922805, 2.30258509299405, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.94591014905531, 2.70805020110221, 2.07944154167984, 1.94591014905531, 2.70805020110221, 1.94591014905531, 2.19722457733622, 1.09861228866811, 0.693147180559945, 
1.79175946922805, 2.19722457733622, 2.70805020110221, 0, 1.6094379124341, 2.39789527279837, 1.94591014905531, 1.38629436111989, 0.693147180559945, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.38629436111989, 2.19722457733622, 1.94591014905531, 1.6094379124341, 1.6094379124341, 1.6094379124341, 2.77258872223978, 2.19722457733622, 2.30258509299405, 1.79175946922805, 1.79175946922805, 1.79175946922805, 2.30258509299405, 1.6094379124341, 1.79175946922805, 2.19722457733622, 0, 1.09861228866811, 0, 2.63905732961526, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.484906649788, 1.79175946922805, 0.693147180559945, 1.38629436111989, 2.484906649788, 0, 1.94591014905531, 1.6094379124341, 1.09861228866811, 2.19722457733622, 0, 1.79175946922805, 2.56494935746154, 0.693147180559945, 1.79175946922805, 1.79175946922805, 2.19722457733622, 1.6094379124341, 1.79175946922805, 2.77258872223978, 0, 2.07944154167984, 2.484906649788, 1.09861228866811, 1.94591014905531, 1.94591014905531, 1.79175946922805, 1.94591014905531, 1.09861228866811, 2.19722457733622, 2.484906649788, 0.693147180559945, 1.09861228866811, 2.77258872223978, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.09861228866811, 1.09861228866811, 2.39789527279837, 2.63905732961526, 1.38629436111989, 2.19722457733622, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.79175946922805, 2.89037175789616, 1.6094379124341, 3.04452243772342, 2.63905732961526, 1.94591014905531, 2.30258509299405, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.70805020110221, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.39789527279837, 1.09861228866811, 1.6094379124341, 1.94591014905531, 0, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 2.484906649788, 1.79175946922805, 2.70805020110221, 3.04452243772342, 2.484906649788, 
3.04452243772342, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 0.693147180559945, 2.19722457733622, 2.07944154167984, 0.693147180559945, 1.79175946922805, 2.30258509299405, 1.79175946922805, 0, 1.79175946922805, 2.99573227355399, 1.38629436111989, 1.38629436111989, 1.94591014905531, 2.07944154167984, 2.63905732961526, 2.30258509299405, 2.19722457733622, 1.79175946922805, 2.19722457733622, 0.693147180559945, 2.19722457733622, 1.09861228866811, 1.79175946922805, 1.09861228866811, 2.07944154167984, 2.70805020110221, 1.38629436111989, 0, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 2.56494935746154, 1.6094379124341, 0, 1.94591014905531, 1.6094379124341, 0, 2.19722457733622, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.63905732961526, 0, 1.94591014905531, 1.79175946922805, 2.56494935746154, 1.94591014905531, 2.19722457733622, 1.79175946922805, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.79175946922805, 2.07944154167984, 1.38629436111989, 1.38629436111989, 2.07944154167984, 3.17805383034795, 1.94591014905531, 1.6094379124341, 1.94591014905531, 1.79175946922805, 0.693147180559945, 2.484906649788, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 2.39789527279837, 0, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.94591014905531, 2.30258509299405, 2.19722457733622, 0, 1.79175946922805, 0, 2.07944154167984, 2.39789527279837, 1.09861228866811, 1.79175946922805, 1.6094379124341, 1.38629436111989, 2.30258509299405, 2.19722457733622, 0, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.6094379124341, 2.19722457733622, 2.39789527279837, 1.6094379124341, 2.39789527279837, 2.484906649788, 2.484906649788, 2.39789527279837, 1.79175946922805, 2.30258509299405, 2.39789527279837, 2.19722457733622, 1.6094379124341, 2.39789527279837, 2.39789527279837, 
2.30258509299405, 1.09861228866811, 2.07944154167984, 0.693147180559945, 2.89037175789616, 1.94591014905531, 1.6094379124341, 1.38629436111989, 2.39789527279837, 1.09861228866811, 2.07944154167984, 2.63905732961526, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.56494935746154, 2.56494935746154, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 2.56494935746154, 1.09861228866811, 2.30258509299405, 2.30258509299405, 2.484906649788, 2.70805020110221, 1.6094379124341, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 2.30258509299405, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.39789527279837, 0, 2.70805020110221, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.38629436111989, 2.30258509299405, 1.6094379124341, 1.38629436111989, 1.6094379124341, 2.39789527279837, 1.79175946922805, 1.38629436111989, 0, 1.38629436111989, 1.94591014905531, 2.39789527279837, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0.693147180559945, 2.07944154167984, 2.19722457733622, 2.63905732961526, 2.07944154167984, 1.94591014905531, 0.693147180559945, 2.07944154167984, 2.07944154167984, 2.484906649788, 1.79175946922805, 2.07944154167984, 1.94591014905531, 2.19722457733622, 1.09861228866811, 0, 1.6094379124341, 1.6094379124341, 0, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.6094379124341, 2.07944154167984, 2.19722457733622, 0.693147180559945, 1.09861228866811, 2.63905732961526, 1.38629436111989, 
1.6094379124341, 0.693147180559945, 1.38629436111989, 2.94443897916644, 2.07944154167984, 1.09861228866811, 2.94443897916644, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.6094379124341, 2.30258509299405, 1.09861228866811, 0, 1.94591014905531, 2.30258509299405, 1.6094379124341, 2.30258509299405, 1.94591014905531, 1.09861228866811, 2.83321334405622, 1.09861228866811, 0, 2.39789527279837, 2.30258509299405, 1.6094379124341, 1.79175946922805, 1.94591014905531, 2.19722457733622, 2.89037175789616, 1.94591014905531, 2.19722457733622, 2.19722457733622, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.30258509299405, 1.6094379124341, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.79175946922805, 2.56494935746154, 2.30258509299405, 1.6094379124341, 0.693147180559945, 0, 2.70805020110221, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.30258509299405, 2.07944154167984, 2.30258509299405, 0, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 2.19722457733622, 2.63905732961526, 1.94591014905531, 2.30258509299405, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 2.39789527279837, 1.6094379124341, 1.94591014905531, 1.09861228866811, 2.30258509299405, 1.38629436111989, 2.07944154167984, 1.79175946922805, 1.6094379124341, 2.77258872223978, 2.19722457733622, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.07944154167984, 0.693147180559945, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.63905732961526, 1.38629436111989, 2.30258509299405, 2.07944154167984, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 2.07944154167984, 1.79175946922805, 2.30258509299405, 2.56494935746154, 1.79175946922805, 0, 0, 0.693147180559945, 2.19722457733622, 2.19722457733622, 2.484906649788, 0.693147180559945, 1.94591014905531, 1.38629436111989, 2.56494935746154, 1.09861228866811, 
1.94591014905531, 0.693147180559945, 1.6094379124341, 0, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 2.30258509299405, 2.07944154167984, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.38629436111989, 1.94591014905531, 1.6094379124341, 1.79175946922805, 2.70805020110221, 0.693147180559945, 2.99573227355399, 2.30258509299405, 1.94591014905531, 2.56494935746154, 1.79175946922805, 1.38629436111989, 1.6094379124341, 2.56494935746154, 2.39789527279837, 2.56494935746154, 1.6094379124341, 2.484906649788, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.79175946922805, 2.56494935746154, 1.6094379124341, 1.6094379124341, 2.19722457733622, 2.484906649788, 1.79175946922805, 1.79175946922805, 1.09861228866811, 2.19722457733622, 0.693147180559945, 2.484906649788, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.83321334405622, 1.6094379124341, 1.79175946922805, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.56494935746154, 1.09861228866811, 2.07944154167984, 2.484906649788, 1.79175946922805, 1.79175946922805, 1.94591014905531, 2.07944154167984, 1.94591014905531, 2.19722457733622, 1.6094379124341, 2.07944154167984, 1.6094379124341, 2.484906649788, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0, 1.94591014905531, 2.19722457733622, 0.693147180559945, 2.484906649788, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.79175946922805, 2.70805020110221, 0.693147180559945, 2.19722457733622, 2.07944154167984, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.6094379124341, 2.07944154167984, 2.30258509299405, 1.09861228866811, 1.38629436111989, 2.484906649788, 0, 2.63905732961526, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.38629436111989, 2.19722457733622, 3.17805383034795, 
0.693147180559945, 2.484906649788, 1.6094379124341, 0.693147180559945, 0.693147180559945, 2.19722457733622, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.484906649788, 2.56494935746154, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.38629436111989, 2.484906649788, 1.6094379124341, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.484906649788, 1.38629436111989, 1.38629436111989, 2.30258509299405, 1.38629436111989, 2.39789527279837, 2.39789527279837, 2.19722457733622, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.94591014905531, 2.484906649788, 1.94591014905531, 3.04452243772342, 1.94591014905531, 2.07944154167984, 2.484906649788, 1.09861228866811, 1.6094379124341, 2.19722457733622, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.6094379124341, 2.70805020110221, 1.6094379124341, 2.94443897916644, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.39789527279837, 1.38629436111989, 0, 2.07944154167984, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.94591014905531, 2.39789527279837, 2.63905732961526, 0.693147180559945, 1.38629436111989, 2.70805020110221, 2.484906649788, 1.94591014905531, 1.79175946922805, 0, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.79175946922805, 0.693147180559945, 2.19722457733622, 0, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.30258509299405, 1.38629436111989, 2.07944154167984, 0, 1.38629436111989, 1.38629436111989, 2.39789527279837, 2.30258509299405, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.6094379124341, 2.56494935746154, 1.79175946922805, 1.09861228866811, 2.484906649788, 2.07944154167984, 1.6094379124341, 1.94591014905531, 1.09861228866811, 1.38629436111989, 1.09861228866811, 
1.09861228866811, 2.30258509299405, 2.39789527279837, 2.19722457733622, 2.19722457733622, 1.94591014905531, 0.693147180559945, 2.07944154167984, 2.19722457733622, 2.77258872223978, 2.56494935746154, 2.56494935746154, 1.6094379124341, 2.19722457733622, 1.09861228866811, 2.77258872223978, 1.79175946922805, 2.30258509299405, 2.19722457733622, 2.07944154167984, 1.09861228866811, 1.6094379124341, 0, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.6094379124341, 2.56494935746154, 1.6094379124341, 1.6094379124341, 2.07944154167984, 2.94443897916644, 1.79175946922805, 2.39789527279837, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.38629436111989, 2.39789527279837, 1.38629436111989, 1.6094379124341, 2.30258509299405, 2.07944154167984, 2.484906649788, 1.94591014905531, 1.6094379124341, 2.30258509299405, 1.38629436111989, 1.94591014905531, 2.19722457733622, 2.89037175789616, 0.693147180559945, 1.6094379124341, 2.30258509299405, 2.39789527279837, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.39789527279837, 1.94591014905531, 1.09861228866811, 2.56494935746154, 1.79175946922805, 2.89037175789616, 1.38629436111989, 2.30258509299405, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.19722457733622, 0.693147180559945, 1.6094379124341, 3.13549421592915, 2.30258509299405, 2.70805020110221, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 2.99573227355399, 1.6094379124341, 2.94443897916644, 2.39789527279837, 2.89037175789616, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.38629436111989, 1.38629436111989, 2.07944154167984, 1.38629436111989, 2.63905732961526, 1.94591014905531, 1.09861228866811, 3.09104245335832, 2.19722457733622, 2.39789527279837, 2.83321334405622, 1.38629436111989, 1.94591014905531, 
1.6094379124341, 2.77258872223978, 1.09861228866811, 2.63905732961526, 1.6094379124341, 2.484906649788, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.38629436111989, 2.39789527279837, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 2.30258509299405, 1.38629436111989, 1.79175946922805, 1.94591014905531, 2.19722457733622, 1.79175946922805, 1.94591014905531, 2.30258509299405, 1.94591014905531, 2.19722457733622, 1.94591014905531, 1.38629436111989, 2.56494935746154, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.77258872223978, 1.6094379124341, 1.79175946922805, 1.94591014905531, 2.19722457733622, 2.19722457733622, 1.94591014905531, 2.77258872223978, 1.79175946922805, 2.07944154167984, 2.19722457733622, 1.6094379124341, 1.38629436111989, 1.38629436111989, 0, 1.38629436111989, 2.39789527279837, 2.39789527279837, 2.63905732961526, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0, 2.39789527279837, 1.79175946922805, 0.693147180559945, 2.56494935746154, 2.19722457733622, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0, 1.79175946922805, 1.6094379124341, 1.94591014905531, 2.30258509299405, 1.79175946922805, 2.07944154167984, 1.09861228866811, 2.39789527279837, 2.07944154167984, 2.19722457733622, 1.94591014905531, 0, 1.38629436111989, 2.484906649788, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.38629436111989, 3.04452243772342, 1.6094379124341, 2.77258872223978, 1.38629436111989, 1.09861228866811, 2.07944154167984, 2.07944154167984, 2.07944154167984, 1.94591014905531, 2.07944154167984, 1.38629436111989, 2.19722457733622, 2.94443897916644, 1.79175946922805, 1.6094379124341, 2.07944154167984, 2.39789527279837, 2.07944154167984, 2.19722457733622, 1.79175946922805, 1.09861228866811, 
1.38629436111989, 1.38629436111989, 3.04452243772342, 1.38629436111989, 1.94591014905531, 2.63905732961526, 2.484906649788, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.39789527279837, 2.19722457733622, 1.6094379124341, 1.09861228866811, 2.39789527279837, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.63905732961526, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.6094379124341, 1.79175946922805, 2.30258509299405, 2.30258509299405, 1.6094379124341, 2.30258509299405, 2.19722457733622, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.30258509299405, 2.07944154167984, 1.38629436111989, 0, 2.07944154167984, 2.39789527279837, 2.56494935746154, 1.38629436111989, 1.6094379124341, 0, 1.94591014905531, 2.19722457733622, 2.07944154167984, 2.484906649788, 1.6094379124341, 3.29583686600433, 1.6094379124341, 1.79175946922805, 0, 0, 2.56494935746154, 2.484906649788, 2.39789527279837, 1.09861228866811, 2.63905732961526, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 2.39789527279837, 1.79175946922805, 1.79175946922805, 1.09861228866811, 1.79175946922805, 2.83321334405622, 1.94591014905531, 1.79175946922805, 2.99573227355399, 2.70805020110221, 2.39789527279837, 2.30258509299405, 0, 1.38629436111989, 2.484906649788, 0.693147180559945, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.6094379124341, 1.09861228866811, 0, 1.79175946922805, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.38629436111989, 0, 2.30258509299405, 1.09861228866811, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.6094379124341, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.56494935746154, 1.94591014905531, 0, 0.693147180559945, 2.63905732961526, 0.693147180559945, 1.09861228866811, 2.30258509299405, 2.30258509299405, 1.09861228866811, 2.30258509299405, 2.07944154167984, 
1.6094379124341, 1.79175946922805, 1.94591014905531, 1.79175946922805, 2.63905732961526, 1.79175946922805, 2.19722457733622, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.09861228866811, 2.07944154167984, 0, 1.09861228866811, 2.39789527279837, 1.6094379124341, 2.07944154167984, 2.30258509299405, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.19722457733622, 2.484906649788, 1.6094379124341, 1.94591014905531, 1.94591014905531, 1.6094379124341, 1.79175946922805, 3.2188758248682, 1.94591014905531, 2.30258509299405, 2.19722457733622, 2.56494935746154, 1.94591014905531, 1.94591014905531, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.79175946922805, 2.07944154167984, 1.6094379124341, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.09861228866811, 2.63905732961526, 1.6094379124341, 2.07944154167984, 2.77258872223978, 2.07944154167984, 1.6094379124341, 1.79175946922805, 0.693147180559945, 0.693147180559945, 2.39789527279837, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.30258509299405, 2.484906649788, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 2.30258509299405, 1.94591014905531, 2.07944154167984, 2.19722457733622, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.63905732961526, 2.39789527279837, 0.693147180559945, 2.56494935746154, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.6094379124341, 2.484906649788, 2.30258509299405, 2.07944154167984, 2.39789527279837, 1.6094379124341, 1.94591014905531, 2.30258509299405, 2.07944154167984, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.09861228866811, 
0.693147180559945, 2.19722457733622, 2.484906649788, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.484906649788, 1.6094379124341, 1.09861228866811, 1.94591014905531, 0, 2.07944154167984, 2.07944154167984, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.94591014905531, 1.09861228866811, 2.99573227355399, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 2.19722457733622, 2.56494935746154, 1.38629436111989, 2.484906649788, 1.79175946922805, 1.09861228866811, 2.39789527279837, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.38629436111989, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.79175946922805, 2.99573227355399, 1.09861228866811, 2.19722457733622, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.39789527279837, 1.6094379124341, 0.693147180559945, 1.38629436111989, 2.94443897916644, 1.6094379124341, 1.94591014905531, 2.77258872223978, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.79175946922805, 0, 1.6094379124341, 2.07944154167984, 1.38629436111989, 0, 0, 1.94591014905531, 0, 1.79175946922805, 2.63905732961526, 2.56494935746154, 2.89037175789616, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.07944154167984, 2.07944154167984, 1.38629436111989, 2.30258509299405, 2.30258509299405, 2.19722457733622, 2.99573227355399, 1.6094379124341, 2.63905732961526, 1.6094379124341, 2.30258509299405, 0.693147180559945, 1.09861228866811, 2.77258872223978, 
2.30258509299405, 2.39789527279837, 2.63905732961526, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 2.39789527279837, 2.484906649788, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.09861228866811, 2.56494935746154, 1.79175946922805, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.79175946922805, 1.79175946922805, 1.94591014905531, 2.39789527279837, 1.6094379124341, 1.94591014905531, 1.38629436111989, 2.07944154167984, 2.07944154167984, 1.94591014905531, 2.77258872223978, 2.484906649788, 2.89037175789616, 2.07944154167984, 2.39789527279837, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.38629436111989, 2.19722457733622, 1.94591014905531, 1.38629436111989, 2.30258509299405, 2.07944154167984, 2.83321334405622, 2.83321334405622, 2.19722457733622, 1.94591014905531, 1.09861228866811, 1.94591014905531, 1.6094379124341, 2.484906649788, 1.6094379124341, 2.484906649788, 1.79175946922805, 2.30258509299405, 2.70805020110221, 1.79175946922805, 2.07944154167984, 1.94591014905531, 1.94591014905531, 2.07944154167984, 1.6094379124341, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.56494935746154, 2.07944154167984, 1.94591014905531, 2.19722457733622, 2.19722457733622, 0.693147180559945, 2.19722457733622, 2.07944154167984, 2.94443897916644, 1.79175946922805, 0, 1.94591014905531, 2.70805020110221, 1.79175946922805, 2.19722457733622, 2.484906649788, 2.63905732961526, 1.6094379124341, 2.39789527279837, 2.19722457733622, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.79175946922805, 2.39789527279837, 0, 1.09861228866811, 1.79175946922805, 2.484906649788, 2.39789527279837, 2.07944154167984, 1.09861228866811, 2.07944154167984, 0.693147180559945, 2.07944154167984, 1.94591014905531, 1.6094379124341, 0, 1.38629436111989, 
1.38629436111989, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.79175946922805, 1.09861228866811, 2.30258509299405, 2.07944154167984, 1.38629436111989, 1.94591014905531, 2.99573227355399, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.19722457733622, 1.09861228866811, 2.30258509299405, 1.38629436111989, 2.39789527279837, 2.19722457733622, 1.79175946922805, 2.30258509299405, 1.09861228866811, 2.30258509299405, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.94591014905531, 2.70805020110221, 2.30258509299405, 1.38629436111989, 1.09861228866811, 1.79175946922805, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.09861228866811, 2.99573227355399, 2.07944154167984, 1.94591014905531, 0, 1.79175946922805, 1.6094379124341, 2.70805020110221, 0.693147180559945, 1.09861228866811, 2.89037175789616, 2.39789527279837, 1.94591014905531, 2.07944154167984, 2.30258509299405, 0, 2.19722457733622, 2.30258509299405, 1.09861228866811, 1.94591014905531, 2.30258509299405, 1.79175946922805, 3.09104245335832, 1.79175946922805, 2.19722457733622, 1.94591014905531, 2.30258509299405, 2.07944154167984, 2.07944154167984, 0, 2.63905732961526, 2.07944154167984, 2.30258509299405, 1.79175946922805, 2.19722457733622, 1.09861228866811, 0, 2.63905732961526, 2.39789527279837, 0.693147180559945, 0, 2.07944154167984, 1.6094379124341, 1.6094379124341, 2.56494935746154, 1.79175946922805, 2.30258509299405, 2.07944154167984, 1.79175946922805, 0, 1.79175946922805, 2.63905732961526, 2.89037175789616, 1.6094379124341, 1.38629436111989, 1.09861228866811, 2.39789527279837, 2.484906649788, 1.38629436111989, 1.38629436111989, 2.39789527279837, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0.693147180559945, 1.6094379124341, 1.09861228866811, 2.56494935746154, 1.38629436111989, 1.6094379124341, 
2.77258872223978, 1.94591014905531, 2.39789527279837, 1.94591014905531, 1.6094379124341, 2.484906649788, 2.39789527279837, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.09861228866811, 2.39789527279837, 2.77258872223978, 2.89037175789616, 0, 1.09861228866811, 1.79175946922805, 2.19722457733622, 2.07944154167984, 2.30258509299405, 1.09861228866811, 2.07944154167984, 2.56494935746154, 0.693147180559945, 2.19722457733622, 1.6094379124341, 2.83321334405622, 2.19722457733622, 1.94591014905531, 2.07944154167984, 1.09861228866811, 2.30258509299405, 2.39789527279837, 0.693147180559945, 2.07944154167984, 1.09861228866811, 2.89037175789616, 1.79175946922805, 2.77258872223978, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.09861228866811, 2.39789527279837, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.94591014905531, 2.63905732961526, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.79175946922805, 1.94591014905531, 2.19722457733622, 1.09861228866811, 2.484906649788, 1.79175946922805, 1.38629436111989, 1.79175946922805, 1.94591014905531, 2.19722457733622, 2.70805020110221, 2.484906649788, 0.693147180559945, 2.19722457733622, 1.94591014905531, 1.09861228866811, 2.07944154167984, 1.38629436111989, 1.94591014905531, 2.39789527279837, 0.693147180559945, 0, 0, 2.30258509299405, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.09861228866811, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.38629436111989, 1.94591014905531, 1.38629436111989, 2.07944154167984, 2.56494935746154, 2.30258509299405, 1.79175946922805, 2.77258872223978, 2.07944154167984, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.94591014905531, 2.39789527279837, 
2.56494935746154, 1.09861228866811, 1.6094379124341, 2.07944154167984, 2.484906649788, 1.94591014905531, 1.38629436111989, 2.30258509299405, 1.94591014905531, 0.693147180559945, 1.6094379124341, 2.99573227355399, 2.07944154167984, 2.39789527279837, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.79175946922805, 1.94591014905531, 2.484906649788, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.79175946922805, 2.07944154167984, 2.07944154167984, 2.07944154167984, 1.79175946922805, 1.38629436111989, 2.99573227355399, 2.77258872223978, 2.83321334405622, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.30258509299405, 1.79175946922805, 2.30258509299405, 2.39789527279837, 1.09861228866811, 2.30258509299405, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 1.6094379124341, 2.39789527279837, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.6094379124341, 2.77258872223978, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0.693147180559945, 2.19722457733622, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 2.19722457733622, 1.38629436111989, 1.09861228866811, 0, 0, 2.30258509299405, 1.09861228866811, 
1.09861228866811, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 2.30258509299405, 0.693147180559945, 0, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.63905732961526, 1.38629436111989, 1.79175946922805, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 2.56494935746154, 0, 0, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 2.39789527279837, 2.19722457733622, 0.693147180559945, 1.94591014905531, 1.38629436111989, 1.09861228866811, 2.30258509299405, 2.07944154167984, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.94591014905531, 0.693147180559945, 2.99573227355399, 2.70805020110221, 1.79175946922805, 0.693147180559945, 0.693147180559945, 2.484906649788, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 0, 0, 1.6094379124341, 2.30258509299405, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 0, 2.07944154167984, 1.09861228866811, 1.09861228866811, 2.39789527279837, 0.693147180559945, 0, 1.6094379124341, 1.6094379124341, 1.94591014905531, 1.09861228866811, 0, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.38629436111989, 2.30258509299405, 2.39789527279837, 1.6094379124341, 0, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0, 0, 2.63905732961526, 
0.693147180559945, 1.38629436111989, 1.09861228866811, 0, 1.38629436111989, 0, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.79175946922805, 2.484906649788, 1.09861228866811, 2.07944154167984, 1.38629436111989, 0, 1.38629436111989, 1.94591014905531, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 2.07944154167984, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 3.17805383034795, 2.19722457733622, 2.56494935746154, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0, 2.19722457733622, 1.09861228866811, 1.79175946922805, 2.19722457733622, 1.09861228866811, 0, 1.38629436111989, 1.94591014905531, 1.38629436111989, 0, 2.07944154167984, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.30258509299405, 1.79175946922805, 0, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 0, 1.6094379124341, 1.94591014905531, 2.07944154167984, 0.693147180559945, 1.79175946922805, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.6094379124341, 1.09861228866811, 2.07944154167984, 1.6094379124341, 0, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 
1.09861228866811, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.19722457733622, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 2.19722457733622, 0, 1.94591014905531, 0, 2.77258872223978, 1.38629436111989, 1.6094379124341, 0, 0, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.79175946922805, 2.39789527279837, 1.79175946922805, 2.30258509299405, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 2.07944154167984, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0, 1.09861228866811, 2.19722457733622, 0, 1.94591014905531, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.09861228866811, 2.39789527279837, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0.693147180559945, 2.30258509299405, 1.79175946922805, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.6094379124341, 2.07944154167984, 0.693147180559945, 1.38629436111989, 2.39789527279837, 1.94591014905531, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.94591014905531, 2.63905732961526, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.79175946922805, 2.30258509299405, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 
1.38629436111989, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 1.38629436111989, 2.19722457733622, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.30258509299405, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.94591014905531, 0.693147180559945, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 2.63905732961526, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0, 1.09861228866811, 1.79175946922805, 2.484906649788, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 0.693147180559945, 0, 2.07944154167984, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 2.39789527279837, 0, 1.79175946922805, 1.38629436111989, 2.07944154167984, 2.07944154167984, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0, 0, 2.19722457733622, 1.79175946922805, 1.38629436111989, 1.38629436111989, 0, 0, 1.79175946922805, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 2.39789527279837, 1.94591014905531, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 1.38629436111989, 
1.09861228866811, 2.07944154167984, 0, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.56494935746154, 0.693147180559945, 1.38629436111989, 0, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 2.94443897916644, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.6094379124341, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.94591014905531, 0, 1.79175946922805, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 1.38629436111989, 2.19722457733622, 0, 0, 2.56494935746154, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 2.19722457733622, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.94591014905531, 1.09861228866811, 2.56494935746154, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 2.70805020110221, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0, 0, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 
1.09861228866811, 0, 1.6094379124341, 2.07944154167984, 0, 0.693147180559945, 1.79175946922805, 0, 1.79175946922805, 1.09861228866811, 0, 0, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0, 1.6094379124341, 2.19722457733622, 1.79175946922805, 0, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.79175946922805, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0, 0, 0, 0, 2.07944154167984, 0.693147180559945, 0, 1.79175946922805, 2.30258509299405, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.38629436111989, 0, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.79175946922805, 0, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.6094379124341, 0, 2.19722457733622, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 2.07944154167984, 0, 1.6094379124341, 0, 1.6094379124341, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.6094379124341, 
1.79175946922805, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.09861228866811, 2.19722457733622, 0, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.94591014905531, 1.6094379124341, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 2.07944154167984, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 2.07944154167984, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0, 0, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 0, 0, 1.94591014905531, 0, 1.09861228866811, 1.09861228866811, 2.39789527279837, 0.693147180559945, 2.63905732961526, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.19722457733622, 1.38629436111989, 1.94591014905531, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.38629436111989, 0, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.94591014905531, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 1.94591014905531, 0, 1.94591014905531, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 
1.38629436111989, 1.6094379124341, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.94591014905531, 2.39789527279837, 1.6094379124341, 2.19722457733622, 1.79175946922805, 2.39789527279837, 2.07944154167984, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.07944154167984, 2.07944154167984, 1.09861228866811, 0, 2.30258509299405, 0.693147180559945, 0.693147180559945, 2.19722457733622, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 1.94591014905531, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 2.39789527279837, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0, 2.484906649788, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.09861228866811, 2.30258509299405, 0.693147180559945, 0.693147180559945, 0, 2.19722457733622, 1.38629436111989, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.94591014905531, 2.19722457733622, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 1.38629436111989, 1.79175946922805, 0, 2.484906649788, 1.6094379124341, 2.07944154167984, 0, 0, 2.19722457733622, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.94591014905531, 2.19722457733622, 1.94591014905531, 2.07944154167984, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 
0.693147180559945, 2.19722457733622, 1.94591014905531, 1.94591014905531, 0, 1.79175946922805, 0, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0.693147180559945, 2.07944154167984, 2.07944154167984, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.94591014905531, 1.6094379124341, 0, 1.09861228866811, 2.39789527279837, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0, 1.38629436111989, 1.09861228866811, 1.94591014905531, 0, 0, 0.693147180559945, 1.38629436111989, 1.94591014905531, 0, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 2.07944154167984, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 2.484906649788, 1.09861228866811, 0, 2.484906649788, 2.30258509299405, 0, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.6094379124341, 2.56494935746154, 1.94591014905531, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 0, 1.94591014905531, 2.07944154167984, 2.07944154167984, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.38629436111989, 2.39789527279837, 0, 2.39789527279837, 1.38629436111989, 0, 2.19722457733622, 1.09861228866811, 0, 1.6094379124341, 2.30258509299405, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.19722457733622, 1.09861228866811, 
1.79175946922805, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.6094379124341, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 2.39789527279837, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.07944154167984, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.09861228866811, 1.79175946922805, 1.94591014905531, 0, 0, 1.94591014905531, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 0, 1.6094379124341, 2.77258872223978, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 2.30258509299405, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.79175946922805, 0.693147180559945, 1.79175946922805, 0, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 0, 0.693147180559945, 0, 1.79175946922805, 1.94591014905531, 2.484906649788, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 0, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0.693147180559945, 2.19722457733622, 1.09861228866811, 2.19722457733622, 1.6094379124341, 1.09861228866811, 1.38629436111989, 
2.19722457733622, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 0, 0, 1.38629436111989, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.07944154167984, 2.30258509299405, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.94591014905531, 1.38629436111989, 2.19722457733622, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.94591014905531, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 2.19722457733622, 2.30258509299405, 0, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.09861228866811, 0.693147180559945, 2.07944154167984, 1.6094379124341, 1.6094379124341, 0, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.09861228866811, 2.07944154167984, 2.07944154167984, 1.09861228866811, 0.693147180559945, 2.70805020110221, 0.693147180559945, 2.30258509299405, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.6094379124341, 0, 2.39789527279837, 1.09861228866811, 1.94591014905531, 1.09861228866811, 2.70805020110221, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.484906649788, 1.09861228866811, 0, 0.693147180559945, 2.19722457733622, 1.38629436111989, 1.94591014905531, 0, 0, 1.6094379124341, 1.6094379124341, 
0, 0, 1.09861228866811, 0.693147180559945, 0, 2.30258509299405, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0, 2.07944154167984, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.79175946922805, 2.77258872223978, 1.38629436111989, 1.38629436111989, 1.6094379124341, 2.19722457733622, 1.79175946922805, 1.6094379124341, 0.693147180559945, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.38629436111989, 0, 0, 2.30258509299405, 0, 1.79175946922805, 1.6094379124341, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0, 0, 0, 1.38629436111989, 2.07944154167984, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 2.19722457733622, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.07944154167984, 2.83321334405622, 1.6094379124341, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0, 1.79175946922805, 0.693147180559945, 0, 1.38629436111989, 1.94591014905531, 1.6094379124341, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 
1.09861228866811, 0, 2.30258509299405, 1.6094379124341, 1.38629436111989, 1.09861228866811, 2.19722457733622, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.07944154167984, 1.79175946922805, 2.19722457733622, 1.94591014905531, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.79175946922805, 2.39789527279837, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0, 0, 1.09861228866811, 0.693147180559945, 1.94591014905531, 2.63905732961526, 1.79175946922805, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.39789527279837, 0, 2.30258509299405, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.94591014905531, 2.07944154167984, 0, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.30258509299405, 1.38629436111989, 0, 2.07944154167984, 1.38629436111989, 2.07944154167984, 1.6094379124341, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 1.38629436111989, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.38629436111989, 2.07944154167984, 0, 0, 2.07944154167984, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 2.07944154167984, 1.79175946922805, 
1.38629436111989, 2.30258509299405, 0.693147180559945, 1.6094379124341, 2.07944154167984, 1.09861228866811, 2.07944154167984, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 2.07944154167984, 2.484906649788, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0, 1.79175946922805, 0, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.07944154167984, 1.79175946922805, 1.09861228866811, 1.09861228866811, 2.63905732961526, 1.6094379124341, 2.39789527279837, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0, 1.6094379124341, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 2.19722457733622, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0, 0, 1.6094379124341, 1.94591014905531, 0, 0, 0, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.38629436111989, 2.07944154167984, 0, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.63905732961526, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.09861228866811, 2.484906649788, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.94591014905531, 0.693147180559945, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.38629436111989, 
3.58351893845611, 1.09861228866811, 1.09861228866811, 1.79175946922805, 0, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0, 0, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 2.39789527279837, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0, 2.484906649788, 0.693147180559945, 0, 1.38629436111989, 1.94591014905531, 0, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.94591014905531, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 0, 0, 0, 1.38629436111989, 1.38629436111989, 0, 0, 0.693147180559945, 2.07944154167984, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.94591014905531, 2.07944154167984, 0.693147180559945, 0, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.38629436111989, 
0.693147180559945, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0, 0, 1.6094379124341, 1.6094379124341, 0, 0.693147180559945, 0, 2.484906649788, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0, 1.79175946922805, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 1.79175946922805, 0, 1.79175946922805, 1.79175946922805, 0, 1.94591014905531, 0, 1.38629436111989, 1.79175946922805, 0, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 0, 0.693147180559945, 0, 0, 1.79175946922805, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 0, 0.693147180559945, 1.6094379124341, 1.79175946922805, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0, 0, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0, 0.693147180559945, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0, 0, 0, 2.39789527279837, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 1.6094379124341, 1.79175946922805, 2.07944154167984, 2.70805020110221, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0.693147180559945, 
0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 0, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0, 0.693147180559945, 2.94443897916644, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 0, 0, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0.693147180559945, 0, 0, 0, 0.693147180559945, 1.79175946922805, 0, 0, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 0, 1.94591014905531, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 0, 0, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 2.30258509299405, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 
1.09861228866811, 1.09861228866811, 1.09861228866811, 2.19722457733622, 0, 2.19722457733622, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 0, 0.693147180559945, 0, 2.19722457733622, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 2.99573227355399, 0, 0, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.6094379124341, 2.07944154167984, 0, 0.693147180559945, 0, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 0, 1.09861228866811, 0, 1.6094379124341, 1.79175946922805, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 1.38629436111989, 1.38629436111989, 0, 1.6094379124341, 0, 1.38629436111989, 0.693147180559945, 0, 1.79175946922805, 0, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 0, 0, 1.09861228866811, 1.6094379124341, 0, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 0, 0, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0, 
0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 2.30258509299405, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0, 1.6094379124341, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0, 1.09861228866811, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.79175946922805, 0, 0, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 0, 0, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 2.77258872223978, 1.6094379124341, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0, 0, 0, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 1.6094379124341, 0, 0, 0.693147180559945, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0, 0, 1.09861228866811, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 0, 0, 2.07944154167984, 0, 0, 1.6094379124341, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0.693147180559945, 
1.38629436111989, 1.38629436111989, 0, 0, 2.07944154167984, 0, 0, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 1.79175946922805, 0, 2.30258509299405, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 1.09861228866811, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0, 1.6094379124341, 0, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 1.6094379124341, 0, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 2.19722457733622, 1.79175946922805, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 2.07944154167984, 1.09861228866811, 1.38629436111989, 0, 0, 0, 0, 1.09861228866811, 0, 2.07944154167984, 1.09861228866811, 0, 0, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0, 0, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0.693147180559945, 2.30258509299405, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0, 0, 1.09861228866811, 1.6094379124341, 0, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.94591014905531, 0, 1.94591014905531, 0.693147180559945, 0, 2.39789527279837, 2.77258872223978, 0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 
0.693147180559945, 0, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 0, 2.19722457733622, 1.09861228866811, 2.19722457733622, 0, 0, 1.6094379124341, 2.19722457733622, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 2.30258509299405, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 0, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0, 0, 0, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 2.63905732961526, 0, 0, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 2.19722457733622, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 0, 1.94591014905531, 0.693147180559945, 0, 0, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0, 1.38629436111989, 0.693147180559945, 
1.09861228866811, 1.38629436111989, 0, 0, 2.19722457733622, 1.38629436111989, 2.30258509299405, 0, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 0, 1.38629436111989, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 0, 0, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0, 0, 0, 0, 0, 1.09861228866811, 0, 0, 1.6094379124341, 0, 1.79175946922805, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 1.38629436111989, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 0, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 0, 2.19722457733622, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 2.07944154167984, 1.09861228866811, 0, 0, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 
2.07944154167984, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.79175946922805, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 2.07944154167984, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 0, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.484906649788, 1.09861228866811, 1.09861228866811, 2.30258509299405, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.94591014905531, 1.79175946922805, 1.79175946922805, 0, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0, 1.09861228866811, 0, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.94591014905531, 
0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 2.19722457733622, 0, 0, 0.693147180559945, 2.30258509299405, 1.09861228866811, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 1.79175946922805, 1.6094379124341, 0, 0.693147180559945, 1.38629436111989, 0, 2.07944154167984, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 0, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 2.19722457733622, 0.693147180559945, 1.38629436111989, 0, 0, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0, 0, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0, 0, 1.79175946922805, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 2.83321334405622, 0, 0.693147180559945, 1.38629436111989, 0, 1.38629436111989, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 1.38629436111989, 0, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.79175946922805, 1.09861228866811, 
1.38629436111989, 0, 1.94591014905531, 1.09861228866811, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 1.38629436111989, 2.07944154167984, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.38629436111989, 0, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 2.19722457733622, 1.94591014905531, 0, 1.38629436111989, 1.79175946922805, 2.39789527279837, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 2.56494935746154, 1.94591014905531, 1.38629436111989, 0, 0.693147180559945, 2.39789527279837, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 2.39789527279837, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.94591014905531, 2.07944154167984, 1.38629436111989, 0, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.94591014905531, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 
2.63905732961526, 1.38629436111989, 0.693147180559945, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.79175946922805, 1.94591014905531, 0.693147180559945, 0, 1.94591014905531, 1.09861228866811, 2.484906649788, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 2.07944154167984, 2.39789527279837, 2.30258509299405, 1.09861228866811, 1.38629436111989, 0.693147180559945, 2.484906649788, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.6094379124341, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0, 1.79175946922805, 2.30258509299405, 2.07944154167984, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0, 0, 2.56494935746154, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.07944154167984, 2.07944154167984, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 2.39789527279837, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.38629436111989, 0, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 2.39789527279837, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.6094379124341, 1.6094379124341, 0, 1.79175946922805, 2.30258509299405, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 
1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.63905732961526, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.79175946922805, 0, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.94591014905531, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 2.89037175789616, 1.94591014905531, 1.79175946922805, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0.693147180559945, 2.39789527279837, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0, 2.07944154167984, 1.09861228866811, 2.07944154167984, 2.07944154167984, 0.693147180559945, 2.07944154167984, 1.79175946922805, 3.09104245335832, 2.19722457733622, 0.693147180559945, 0.693147180559945, 2.30258509299405, 1.38629436111989, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0, 1.38629436111989, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.79175946922805, 2.63905732961526, 0.693147180559945, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.09861228866811, 2.19722457733622, 1.38629436111989, 0, 1.79175946922805, 1.79175946922805, 1.6094379124341, 0, 
1.38629436111989, 2.30258509299405, 1.09861228866811, 1.09861228866811, 2.30258509299405, 0, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.39789527279837, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.6094379124341, 2.07944154167984, 1.09861228866811, 2.19722457733622, 2.30258509299405, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0, 1.6094379124341, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.6094379124341, 2.07944154167984, 1.79175946922805, 1.6094379124341, 1.79175946922805, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.79175946922805, 1.09861228866811, 0, 1.6094379124341, 0, 2.07944154167984, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.30258509299405, 0, 1.79175946922805, 2.07944154167984, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0, 1.09861228866811, 0, 0, 0.693147180559945, 
1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.09861228866811, 2.19722457733622, 0, 0, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.94591014905531, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.07944154167984, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.94591014905531, 0.693147180559945, 1.6094379124341, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.6094379124341, 2.07944154167984, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 2.70805020110221, 0, 0.693147180559945, 1.09861228866811, 0, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.79175946922805, 2.19722457733622, 1.79175946922805, 2.39789527279837, 2.30258509299405, 2.30258509299405, 1.09861228866811, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.94591014905531, 2.30258509299405, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.38629436111989, 
0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 2.39789527279837, 1.09861228866811, 2.19722457733622, 0.693147180559945, 0.693147180559945, 0, 2.30258509299405, 0, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.39789527279837, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.6094379124341, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.79175946922805, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0, 2.39789527279837, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 1.38629436111989, 1.94591014905531, 0.693147180559945, 0, 2.39789527279837, 1.38629436111989, 0, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.94591014905531, 0.693147180559945, 0, 2.484906649788, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.6094379124341, 1.38629436111989, 2.07944154167984, 
0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 0, 1.79175946922805, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 2.39789527279837, 1.09861228866811, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.6094379124341, 2.39789527279837, 0, 1.09861228866811, 2.30258509299405, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.30258509299405, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.79175946922805, 1.94591014905531, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.07944154167984, 1.38629436111989, 0.693147180559945, 0.693147180559945, 2.07944154167984, 2.07944154167984, 0.693147180559945, 2.39789527279837, 0, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.6094379124341, 0.693147180559945, 2.07944154167984, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 2.484906649788, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 0, 2.07944154167984, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 0, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.94591014905531, 
1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.94591014905531, 0, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 1.79175946922805, 1.38629436111989, 1.94591014905531, 0, 0.693147180559945, 1.6094379124341, 2.484906649788, 1.6094379124341, 1.09861228866811, 1.09861228866811, 2.30258509299405, 0, 1.09861228866811, 1.38629436111989, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 2.07944154167984, 2.19722457733622, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 1.38629436111989, 0, 1.6094379124341, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0, 1.79175946922805, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.39789527279837, 2.56494935746154, 0.693147180559945, 0, 0, 0, 1.79175946922805, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0, 1.79175946922805, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.6094379124341, 
1.6094379124341, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 1.09861228866811, 0, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 2.39789527279837, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 0, 0, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 0, 2.30258509299405, 1.79175946922805, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 1.94591014905531, 0, 1.6094379124341, 0, 0.693147180559945, 0, 1.38629436111989, 2.19722457733622, 1.6094379124341, 0, 1.6094379124341, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0, 0, 2.39789527279837, 0.693147180559945, 0, 0, 1.09861228866811, 0, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 
2.19722457733622, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0.693147180559945, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.94591014905531, 1.38629436111989, 0, 0, 1.38629436111989, 1.38629436111989, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 2.63905732961526, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.6094379124341, 0, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 1.38629436111989, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 0, 0, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0, 0, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 
2.07944154167984, 0, 0.693147180559945, 0, 0, 2.19722457733622, 0, 0, 0, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0, 2.484906649788, 1.38629436111989, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 0, 0, 1.79175946922805, 1.09861228866811, 0, 0, 0, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 2.39789527279837, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 0, 0, 0, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.38629436111989, 2.30258509299405, 0, 0, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.94591014905531, 0, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.19722457733622, 0.693147180559945, 1.79175946922805, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0, 0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 1.6094379124341, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 2.30258509299405, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 
1.09861228866811, 0, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.38629436111989, 0, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 0, 0, 1.6094379124341, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 0, 0.693147180559945, 0, 1.09861228866811, 2.484906649788, 0, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.79175946922805, 0, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.30258509299405, 0, 1.09861228866811, 0.693147180559945, 0, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0, 2.07944154167984, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0, 0, 0, 0, 1.6094379124341, 0, 1.09861228866811, 2.07944154167984, 0, 0, 0.693147180559945, 
1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 1.38629436111989, 0, 1.79175946922805, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 0, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 0, 0, 0, 2.19722457733622, 0, 1.79175946922805, 1.38629436111989, 1.09861228866811, 0, 0, 0, 1.09861228866811, 2.39789527279837, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 1.38629436111989, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.6094379124341, 2.07944154167984, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 2.19722457733622, 0, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.79175946922805, 0.693147180559945, 2.30258509299405, 
0.693147180559945, 1.6094379124341, 1.94591014905531, 0, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 1.09861228866811, 0, 0, 1.09861228866811, 2.63905732961526, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.6094379124341, 2.77258872223978, 1.38629436111989, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 0, 1.94591014905531, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0, 0, 0, 0.693147180559945, 1.6094379124341, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0, 0, 0, 0.693147180559945, }; - var targets = new double[] { 1, 7, 7, 2, 3, 7, 7, 7, 2, 7, 7, 7, 7, 1, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 1, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 3, 7, 2, 7, 7, 7, 7, 7, 5, 3, 7, 7, 7, 7, 7, 7, 3, 7, 7, 6, 7, 7, 2, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 2, 2, 7, 7, 17, 7, 2, 7, 1, 7, 7, 7, 7, 7, 7, 17, 7, 7, 7, 7, 7, 17, 7, 2, 7, 7, 1, 7, 1, 7, 7, 17, 7, 7, 1, 7, 7, 7, 7, 7, 7, 7, 2, 2, 2, 2, 1, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 0, 7, 1, 7, 1, 7, 7, 7, 7, 2, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 2, 2, 7, 2, 2, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 2, 2, 7, 2, 7, 1, 7, 2, 7, 7, 1, 7, 7, 7, 5, 2, 2, 7, 7, 2, 7, 7, 7, 7, 7, 17, 7, 2, 2, 1, 2, 
7, 2, 7, 7, 2, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 2, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 2, 7, 7, 17, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 2, 7, 7, 2, 7, 14, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 1, 7, 7, 7, 7, 7, 7, 2, 7, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 7, 7, 3, 2, 2, 7, 7, 2, 2, 2, 2, 7, 7, 1, 2, 2, 2, 2, 2, 2, 17, 17, 2, 7, 7, 2, 1, 2, 2, 2, 3, 2, 2, 2, 2, 2, 3, 3, 2, 2, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 3, 1, 2, 3, 2, 2, 3, 2, 3, 3, 2, 2, 2, 2, 2, 2, 1, 3, 3, 1, 3, 3, 1, 1, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 3, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 1, 14, 2, 2, 2, 1, 3, 1, 2, 1, 2, 2, 2, 1, 1, 3, 2, 2, 1, 2, 1, 2, 7, 2, 2, 2, 1, 2, 4, 2, 2, 2, 3, 1, 3, 1, 3, 2, 3, 3, 2, 3, 2, 2, 2, 1, 2, 1, 2, 2, 3, 2, 2, 2, 1, 2, 2, 14, 2, 2, 2, 3, 3, 2, 1, 2, 3, 2, 2, 1, 3, 2, 2, 2, 2, 1, 1, 2, 2, 3, 2, 3, 3, 1, 1, 2, 2, 2, 1, 2, 3, 2, 3, 2, 2, 4, 14, 2, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 1, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 3, 3, 7, 3, 2, 2, 1, 1, 3, 2, 2, 2, 1, 2, 3, 2, 1, 2, 3, 2, 1, 2, 2, 1, 2, 2, 2, 2, 3, 1, 2, 3, 2, 3, 1, 2, 15, 2, 2, 2, 13, 3, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 1, 1, 2, 3, 2, 3, 2, 2, 2, 3, 3, 2, 3, 3, 2, 2, 2, 3, 3, 2, 2, 3, 2, 2, 3, 17, 3, 2, 2, 3, 3, 2, 2, 3, 2, 2, 3, 1, 2, 3, 3, 7, 2, 2, 3, 2, 2, 2, 2, 2, 3, 3, 2, 2, 2, 2, 3, 3, 3, 1, 3, 2, 3, 1, 3, 2, 2, 3, 3, 2, 2, 3, 3, 3, 3, 3, 2, 3, 2, 3, 2, 3, 2, 1, 2, 2, 3, 2, 3, 1, 2, 3, 2, 2, 3, 2, 2, 1, 2, 3, 2, 2, 2, 2, 1, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 2, 1, 2, 1, 1, 13, 15, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 1, 2, 14, 1, 1, 14, 1, 16, 15, 2, 2, 3, 2, 2, 2, 2, 14, 2, 1, 2, 2, 2, 2, 14, 13, 5, 13, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 2, 13, 1, 14, 2, 1, 2, 1, 1, 2, 2, 1, 7, 2, 1, 1, 1, 1, 2, 2, 2, 3, 3, 1, 1, 2, 2, 2, 2, 14, 2, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 3, 2, 1, 2, 1, 14, 16, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 3, 2, 1, 2, 2, 14, 13, 3, 2, 2, 1, 1, 1, 1, 1, 
2, 2, 2, 2, 1, 2, 2, 2, 13, 14, 3, 2, 1, 3, 2, 2, 2, 6, 2, 2, 2, 1, 1, 14, 3, 13, 17, 1, 14, 2, 2, 1, 2, 3, 2, 3, 2, 2, 1, 1, 2, 2, 1, 2, 15, 3, 14, 1, 1, 2, 1, 1, 1, 1, 2, 2, 14, 1, 14, 1, 1, 2, 2, 1, 2, 2, 1, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 1, 1, 1, 16, 2, 2, 1, 2, 1, 14, 16, 14, 14, 14, 3, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 1, 1, 3, 14, 13, 2, 14, 14, 1, 3, 1, 2, 14, 2, 1, 1, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 2, 1, 2, 2, 14, 2, 2, 1, 2, 2, 3, 3, 2, 2, 2, 1, 2, 2, 2, 2, 2, 3, 2, 2, 1, 2, 2, 2, 3, 1, 4, 2, 2, 2, 2, 1, 2, 1, 3, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 13, 1, 1, 2, 1, 13, 1, 2, 2, 2, 2, 2, 16, 1, 2, 1, 2, 1, 1, 2, 2, 3, 2, 1, 2, 1, 2, 1, 2, 1, 1, 2, 3, 2, 1, 2, 2, 1, 2, 2, 3, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 5, 2, 1, 1, 2, 3, 2, 2, 2, 2, 3, 3, 2, 2, 2, 1, 1, 3, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 1, 2, 1, 1, 1, 1, 2, 1, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 1, 2, 1, 1, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 14, 2, 2, 2, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 1, 2, 2, 2, 1, 3, 2, 3, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 2, 2, 17, 2, 2, 2, 3, 2, 1, 2, 2, 2, 2, 1, 2, 1, 1, 1, 2, 2, 3, 1, 1, 2, 2, 2, 2, 2, 2, 2, 14, 3, 1, 13, 2, 2, 1, 2, 1, 1, 2, 2, 1, 1, 6, 2, 1, 1, 2, 3, 1, 2, 7, 2, 2, 2, 1, 1, 1, 2, 2, 13, 2, 1, 2, 14, 1, 1, 1, 2, 3, 1, 2, 2, 2, 2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 1, 2, 1, 2, 6, 1, 2, 2, 2, 1, 1, 2, 1, 2, 5, 1, 2, 1, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 7, 2, 1, 1, 2, 2, 1, 2, 2, 13, 2, 2, 2, 1, 2, 3, 2, 2, 2, 2, 2, 1, 3, 3, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 17, 14, 14, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 1, 1, 1, 2, 2, 1, 2, 3, 1, 1, 2, 2, 2, 15, 2, 2, 1, 1, 16, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 14, 
2, 1, 3, 1, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 3, 1, 1, 1, 1, 2, 2, 1, 3, 2, 3, 2, 2, 1, 2, 3, 14, 3, 17, 3, 2, 1, 1, 1, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 3, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 3, 1, 2, 3, 2, 3, 3, 2, 2, 13, 2, 1, 1, 1, 3, 3, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 3, 3, 2, 2, 1, 2, 2, 3, 1, 2, 3, 1, 2, 2, 3, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 3, 3, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 1, 2, 1, 2, 3, 3, 2, 2, 1, 1, 2, 2, 1, 2, 1, 2, 2, 2, 3, 3, 2, 1, 2, 2, 3, 2, 3, 3, 2, 1, 7, 2, 3, 2, 2, 2, 2, 3, 2, 3, 2, 2, 2, 3, 1, 2, 2, 2, 1, 3, 2, 2, 2, 2, 7, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 3, 2, 1, 2, 1, 2, 3, 2, 1, 2, 2, 2, 17, 2, 1, 1, 2, 1, 2, 2, 3, 2, 2, 3, 1, 1, 1, 2, 2, 2, 2, 1, 3, 3, 3, 2, 2, 2, 2, 1, 1, 3, 2, 2, 2, 2, 3, 2, 1, 7, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 2, 2, 3, 3, 3, 2, 1, 2, 2, 2, 3, 3, 2, 2, 3, 3, 2, 2, 2, 3, 3, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 3, 16, 1, 2, 1, 2, 2, 2, 2, 2, 3, 2, 2, 1, 3, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 3, 2, 1, 2, 1, 2, 1, 1, 1, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 2, 13, 2, 1, 1, 2, 1, 1, 1, 1, 2, 3, 13, 2, 2, 2, 2, 2, 3, 2, 2, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 1, 1, 2, 2, 14, 1, 3, 2, 14, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 13, 2, 3, 2, 2, 2, 2, 2, 2, 17, 1, 1, 3, 15, 1, 2, 2, 2, 2, 14, 2, 2, 2, 14, 2, 1, 2, 2, 2, 3, 1, 2, 2, 2, 2, 1, 2, 1, 13, 1, 2, 1, 2, 2, 16, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 1, 17, 2, 2, 2, 1, 1, 1, 2, 1, 3, 13, 14, 2, 3, 2, 3, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 17, 3, 2, 1, 3, 1, 3, 6, 3, 2, 4, 3, 2, 3, 2, 2, 14, 1, 2, 14, 1, 3, 2, 2, 1, 2, 2, 3, 1, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 1, 14, 2, 1, 2, 2, 3, 2, 3, 3, 3, 2, 3, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 17, 1, 2, 15, 2, 2, 1, 3, 2, 1, 2, 2, 
3, 2, 2, 16, 2, 1, 1, 2, 2, 2, 3, 1, 1, 2, 3, 2, 2, 2, 2, 3, 1, 2, 2, 3, 2, 3, 16, 2, 2, 3, 3, 3, 1, 1, 2, 2, 3, 3, 2, 2, 2, 3, 2, 2, 3, 2, 15, 14, 3, 2, 2, 2, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 3, 3, 2, 2, 3, 7, 3, 2, 2, 2, 2, 3, 2, 2, 5, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 3, 3, 3, 3, 3, 4, 2, 2, 2, 2, 2, 3, 2, 3, 3, 2, 3, 2, 13, 2, 3, 3, 14, 3, 2, 3, 2, 13, 3, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 3, 2, 3, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 3, 2, 1, 3, 3, 2, 5, 2, 2, 3, 3, 2, 3, 1, 3, 3, 2, 3, 2, 2, 3, 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 2, 2, 2, 3, 2, 3, 3, 3, 3, 2, 2, 2, 3, 2, 2, 3, 2, 2, 3, 3, 3, 3, 3, 2, 3, 3, 3, 15, 2, 2, 2, 3, 3, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 2, 2, 3, 2, 3, 3, 7, 1, 2, 2, 2, 3, 3, 2, 3, 2, 2, 2, 2, 3, 2, 3, 2, 2, 13, 2, 3, 3, 2, 2, 2, 2, 2, 2, 3, 3, 3, 2, 2, 16, 2, 2, 2, 2, 2, 3, 2, 3, 3, 2, 2, 13, 2, 3, 2, 2, 2, 2, 2, 3, 3, 14, 2, 3, 2, 3, 3, 2, 3, 2, 2, 3, 13, 2, 2, 3, 2, 2, 2, 1, 2, 2, 3, 1, 14, 2, 2, 2, 2, 1, 2, 2, 1, 3, 2, 2, 2, 3, 3, 3, 2, 1, 2, 2, 14, 3, 3, 2, 3, 2, 2, 3, 2, 2, 16, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 13, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 2, 15, 2, 2, 15, 14, 13, 2, 3, 2, 2, 3, 3, 2, 2, 16, 2, 2, 2, 2, 2, 2, 3, 2, 14, 2, 2, 14, 13, 2, 2, 2, 3, 2, 14, 2, 2, 2, 1, 2, 13, 2, 2, 2, 2, 2, 3, 13, 2, 2, 2, 2, 2, 2, 2, 2, 13, 13, 6, 2, 3, 2, 14, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 13, 2, 2, 16, 2, 13, 13, 1, 2, 2, 3, 2, 2, 3, 3, 2, 2, 2, 2, 1, 3, 2, 3, 3, 3, 6, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 3, 2, 2, 3, 2, 3, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 2, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 3, 2, 3, 2, 2, 2, 2, 3, 2, 2, 2, 3, 3, 17, 2, 3, 3, 3, 3, 2, 2, 2, 2, 3, 2, 2, 2, 2, 13, 3, 2, 2, 3, 2, 2, 3, 2, 14, 2, 2, 17, 17, 2, 2, 1, 2, 3, 1, 2, 1, 1, 15, 2, 2, 2, 3, 13, 2, 2, 13, 2, 2, 2, 3, 3, 2, 3, 2, 2, 13, 7, 3, 2, 3, 2, 14, 2, 3, 2, 2, 2, 
13, 2, 3, 3, 2, 14, 14, 2, 2, 2, 3, 2, 2, 2, 2, 13, 2, 2, 2, 13, 2, 2, 2, 2, 2, 3, 2, 13, 2, 2, 2, 2, 2, 2, 14, 3, 2, 2, 2, 2, 17, 1, 3, 2, 2, 2, 13, 2, 3, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 2, 3, 2, 13, 2, 2, 2, 3, 3, 3, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 16, 2, 2, 4, 2, 2, 16, 16, 2, 2, 3, 2, 2, 2, 3, 13, 2, 2, 3, 2, 2, 2, 1, 2, 2, 3, 2, 3, 3, 2, 2, 2, 2, 2, 3, 3, 2, 4, 2, 2, 4, 3, 2, 2, 3, 2, 3, 2, 3, 3, 3, 2, 2, 3, 14, 2, 2, 2, 4, 2, 3, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 15, 3, 14, 2, 2, 2, 3, 2, 2, 1, 3, 3, 2, 3, 2, 2, 3, 3, 14, 3, 2, 13, 2, 2, 2, 3, 2, 3, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 3, 2, 2, 13, 3, 2, 2, 3, 2, 3, 2, 2, 3, 3, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 13, 2, 13, 2, 1, 1, 14, 3, 2, 2, 2, 1, 13, 2, 2, 13, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 14, 2, 2, 13, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 14, 2, 2, 13, 2, 2, 2, 3, 2, 2, 2, 2, 3, 1, 1, 2, 3, 2, 2, 2, 2, 3, 3, 2, 13, 2, 14, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 1, 2, 2, 2, 3, 2, 2, 1, 1, 1, 3, 2, 3, 2, 2, 3, 2, 3, 2, 3, 2, 2, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 2, 1, 2, 1, 3, 2, 2, 1, 2, 2, 2, 2, 13, 13, 2, 2, 1, 1, 2, 1, 3, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 1, 2, 2, 3, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 1, 2, 1, 2, 2, 2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 2, 3, 2, 3, 3, 2, 1, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 3, 1, 1, 1, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 2, 3, 2, 3, 2, 2, 14, 15, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 2, 1, 17, 2, 2, 1, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 14, 2, 2, 2, 13, 2, 3, 
2, 2, 1, 2, 13, 1, 2, 1, 2, 2, 1, 2, 2, 13, 2, 3, 3, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 14, 2, 17, 13, 1, 3, 2, 3, 2, 1, 3, 3, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 14, 13, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 1, 2, 3, 2, 16, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 3, 2, 2, 1, 2, 2, 2, 2, 2, 3, 1, 13, 1, 2, 1, 2, 2, 13, 1, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 3, 14, 3, 2, 1, 2, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2, 2, 2, 1, 2, 3, 1, 2, 3, 2, 1, 1, 2, 13, 2, 2, 2, 2, 3, 3, 2, 2, 1, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 3, 16, 2, 2, 1, 2, 2, 2, 2, 3, 1, 3, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 14, 2, 2, 2, 2, 1, 2, 2, 1, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 1, 2, 2, 2, 1, 2, 1, 2, 2, 2, 1, 2, 3, 2, 1, 2, 2, 1, 2, 2, 3, 2, 2, 2, 1, 2, 2, 2, 2, 2, 3, 1, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 1, 1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 13, 2, 2, 2, 2, 2, 1, 14, 2, 2, 2, 13, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 13, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 13, 2, 2, 2, 14, 1, 2, 2, 2, 1, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 13, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 3, 3, 2, 3, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 3, 2, 1, 1, 2, 2, 1, 2, 1, 1, 3, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 14, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 1, 2, 1, 2, 2, 2, 13, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 3, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 14, 
1, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 1, 1, 13, 1, 1, 1, 2, 1, 1, 1, 2, 1, 2, 2, 2, 1, 1, 2, 1, 1, 13, 7, 2, 1, 1, 2, 1, 1, 1, 2, 2, 7, 3, 1, 1, 1, 3, 2, 2, 2, 3, 2, 2, 2, 1, 2, 2, 2, 2, 13, 3, 7, 2, 7, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 3, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 2, 14, 5, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 4, 2, 2, 1, 2, 1, 2, 2, 2, 17, 1, 1, 1, 1, 1, 2, 3, 2, 2, 1, 1, 2, 2, 1, 1, 1, 2, 1, 1, 1, 6, 1, 2, 3, 2, 2, 2, 3, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 7, 17, 1, 3, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1, 1, 1, 1, 2, 14, 2, 2, 1, 7, 14, 1, 2, 1, 1, 3, 2, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 3, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 13, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 1, 7, 1, 1, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 2, 4, 3, 2, 3, 1, 1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 1, 2, 3, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 3, 1, 1, 2, 2, 2, 1, 1, 2, 7, 7, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 7, 2, 1, 2, 1, 2, 3, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 3, 2, 2, 3, 2, 2, 1, 2, 3, 2, 2, 2, 2, 3, 1, 2, 2, 1, 2, 1, 1, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 17, 2, 14, 2, 2, 2, 2, 14, 2, 2, 2, 3, 2, 1, 2, 2, 2, 2, 3, 3, 1, 3, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 2, 3, 1, 7, 2, 3, 2, 2, 2, 2, 2, 7, 2, 2, 3, 2, 4, 2, 2, 3, 2, 3, 2, 2, 3, 7, 2, 2, 2, 5, 3, 2, 2, 2, 2, 2, 2, 14, 3, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 3, 2, 1, 2, 1, 2, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 2, 1, 2, 2, 1, 1, 1, 1, 1, 2, 1, 1, 7, 1, 1, 2, 2, 17, 2, 2, 1, 1, 2, 17, 2, 1, 13, 1, 17, 7, 2, 1, 2, 1, 13, 2, 1, 2, 2, 2, 1, 1, 2, 14, 2, 2, 17, 1, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 
14, 2, 7, 1, 4, 2, 17, 13, 7, 2, 1, 2, 2, 2, 1, 2, 1, 1, 7, 13, 7, 1, 2, 13, 1, 2, 2, 2, 7, 1, 2, 2, 2, 14, 1, 2, 2, 1, 7, 2, 1, 2, 2, 2, 2, 2, 14, 2, 3, 1, 1, 2, 2, 14, 2, 2, 2, 7, 2, 17, 1, 14, 2, 2, 2, 2, 17, 2, 3, 3, 2, 1, 2, 2, 1, 1, 2, 2, 2, 1, 1, 2, 7, 7, 13, 1, 2, 2, 2, 3, 1, 3, 2, 5, 14, 7, 2, 1, 1, 7, 1, 1, 7, 1, 1, 2, 2, 17, 1, 2, 7, 7, 2, 1, 1, 13, 7, 1, 2, 1, 1, 1, 2, 2, 1, 17, 7, 1, 1, 1, 2, 7, 1, 1, 17, 1, 17, 1, 2, 1, 1, 1, 2, 13, 13, 2, 15, 7, 2, 7, 7, 2, 2, 7, 2, 1, 0, 2, 2, 14, 2, 1, 2, 1, 2, 1, 14, 14, 3, 3, 17, 2, 1, 1, 1, 1, 1, 1, 3, 2, 1, 14, 2, 1, 2, 1, 1, 3, 3, 1, 2, 1, 2, 1, 3, 3, 2, 1, 2, 2, 3, 2, 2, 3, 2, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 13, 14, 1, 3, 2, 17, 1, 2, 2, 2, 13, 2, 1, 2, 2, 1, 2, 1, 2, 1, 2, 15, 2, 2, 1, 1, 2, 2, 4, 2, 1, 2, 1, 2, 2, 3, 2, 2, 2, 2, 1, 2, 2, 3, 2, 3, 1, 2, 2, 2, 1, 2, 7, 2, 1, 1, 3, 2, 14, 2, 2, 2, 2, 3, 1, 1, 1, 13, 1, 2, 2, 3, 14, 12, 14, 2, 2, 1, 2, 1, 1, 15, 1, 2, 3, 2, 3, 14, 1, 2, 2, 1, 2, 7, 2, 1, 2, 1, 1, 2, 2, 1, 2, 13, 3, 2, 2, 1, 2, 1, 2, 3, 2, 3, 1, 1, 3, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 13, 2, 1, 12, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 14, 1, 2, 3, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 3, 3, 2, 2, 13, 3, 3, 3, 2, 3, 2, 1, 3, 1, 2, 2, 7, 2, 1, 2, 1, 2, 2, 3, 2, 14, 3, 2, 2, 1, 2, 2, 2, 3, 2, 2, 2, 3, 1, 1, 2, 14, 1, 2, 1, 1, 2, 2, 2, 2, 2, 15, 2, 3, 3, 2, 2, 3, 1, 1, 2, 2, 2, 3, 2, 14, 2, 1, 3, 3, 2, 2, 3, 2, 2, 2, 3, 4, 2, 2, 2, 12, 15, 2, 14, 14, 1, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 1, 2, 3, 7, 2, 2, 13, 2, 13, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 3, 2, 1, 13, 2, 3, 14, 3, 2, 1, 14, 3, 2, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 3, 1, 3, 2, 2, 3, 2, 1, 14, 2, 14, 2, 2, 3, 2, 2, 1, 2, 2, 2, 2, 14, 2, 2, 2, 2, 1, 3, 2, 1, 1, 2, 2, 2, 2, 14, 2, 2, 14, 13, 2, 1, 3, 2, 1, 2, 3, 1, 1, 3, 2, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2, 2, 7, 2, 1, 2, 2, 7, 2, 2, 1, 14, 1, 3, 13, 2, 2, 1, 15, 2, 2, 2, 2, 2, 2, 1, 1, 13, 2, 2, 4, 2, 3, 2, 3, 1, 2, 2, 2, 2, 1, 2, 1, 1, 1, 1, 1, 3, 1, 1, 3, 14, 3, 2, 1, 2, 2, 2, 1, 2, 1, 2, 13, 1, 2, 2, 2, 2, 1, 2, 
14, 1, 2, 2, 1, 2, 1, 1, 1, 3, 2, 2, 2, 13, 2, 1, 2, 13, 2, 2, 2, 3, 1, 1, 1, 1, 1, 14, 3, 1, 2, 2, 3, 2, 3, 2, 1, 2, 1, 14, 2, 1, 2, 1, 2, 2, 2, 2, 13, 14, 2, 13, 1, 2, 2, 2, 2, 4, 1, 1, 2, 14, 1, 2, 1, 3, 2, 1, 1, 1, 2, 2, 2, 6, 2, 2, 2, 14, 2, 2, 14, 2, 2, 1, 1, 1, 1, 2, 17, 2, 1, 2, 1, 3, 12, 1, 2, 2, 2, 1, 3, 1, 1, 2, 1, 2, 1, 2, 1, 1, 1, 14, 2, 13, 13, 1, 2, 13, 1, 1, 1, 2, 14, 1, 3, 3, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 13, 1, 1, 1, 12, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 6, 1, 13, 2, 1, 13, 13, 2, 1, 1, 1, 1, 1, 13, 3, 2, 2, 2, 2, 1, 2, 1, 2, 6, 1, 1, 3, 1, 17, 1, 12, 2, 3, 2, 1, 2, 1, 1, 1, 2, 3, 13, 3, 3, 3, 2, 1, 13, 1, 1, 14, 7, 2, 17, 1, 1, 1, 1, 14, 1, 12, 2, 7, 1, 12, 2, 2, 1, 2, 14, 13, 1, 3, 13, 2, 1, 2, 1, 2, 2, 1, 14, 2, 3, 1, 1, 17, 1, 1, 13, 1, 2, 2, 1, 13, 1, 3, 17, 1, 1, 1, 2, 13, 1, 1, 14, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 2, 17, 1, 2, 1, 17, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 3, 1, 13, 1, 1, 2, 1, 2, 1, 1, 1, 2, 1, 16, 6, 2, 1, 2, 1, 1, 17, 1, 3, 1, 2, 2, 2, 1, 2, 1, 2, 1, 3, 1, 2, 2, 17, 2, 1, 3, 1, 2, 1, 1, 12, 13, 2, 2, 2, 1, 1, 2, 2, 1, 2, 1, 2, 2, 1, 12, 13, 3, 1, 1, 1, 2, 17, 2, 2, 1, 1, 1, 1, 1, 12, 1, 2, 2, 1, 1, 1, 13, 1, 1, 1, 1, 13, 1, 14, 1, 1, 1, 1, 1, 1, 2, 2, 13, 1, 2, 1, 2, 2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 1, 2, 1, 1, 1, 1, 2, 14, 17, 2, 2, 2, 17, 1, 2, 2, 13, 1, 1, 2, 13, 2, 2, 1, 1, 2, 1, 1, 2, 1, 2, 2, 1, 1, 1, 1, 1, 1, 17, 1, 17, 3, 3, 2, 2, 1, 13, 1, 13, 12, 2, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 2, 2, 12, 1, 17, 2, 12, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 7, 2, 2, 1, 2, 2, 1, 3, 2, 1, 1, 1, 2, 2, 17, 3, 1, 2, 2, 2, 1, 3, 17, 2, 1, 2, 1, 2, 1, 2, 1, 1, 1, 2, 12, 17, 1, 1, 2, 1, 1, 1, 2, 13, 2, 2, 1, 1, 15, 2, 17, 1, 14, 1, 1, 1, 3, 1, 2, 2, 2, 2, 2, 3, 1, 1, 2, 1, 1, 1, 2, 1, 13, 7, 1, 1, 17, 2, 14, 2, 1, 1, 2, 4, 1, 2, 1, 1, 14, 2, 2, 1, 2, 17, 17, 2, 1, 1, 2, 17, 17, 17, 2, 1, 1, 2, 3, 2, 2, 13, 3, 13, 1, 17, 17, 17, 17, 2, 7, 2, 1, 17, 1, 2, 1, 15, 3, 13, 1, 3, 1, 2, 12, 
15, 3, 2, 1, 1, 3, 2, 1, 2, 1, 2, 17, 2, 1, 3, 1, 1, 3, 1, 1, 1, 2, 15, 17, 1, 13, 12, 2, 2, 1, 1, 13, 1, 1, 1, 17, 1, 1, 1, 2, 12, 2, 1, 1, 1, 1, 17, 1, 17, 1, 2, 12, 17, 17, 2, 3, 14, 15, 2, 14, 17, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 14, 1, 1, 17, 2, 2, 12, 1, 1, 1, 1, 1, 1, 1, 14, 17, 1, 1, 2, 1, 17, 3, 1, 1, 2, 13, 3, 1, 1, 1, 1, 1, 13, 1, 1, 1, 13, 1, 2, 1, 3, 2, 13, 7, 1, 14, 14, 14, 13, 17, 13, 1, 3, 1, 17, 17, 13, 1, 1, 1, 12, 17, 1, 1, 13, 15, 1, 2, 1, 1, 1, 1, 2, 1, 1, 12, 12, 13, 1, 1, 2, 12, 12, 1, 3, 17, 2, 12, 17, 1, 1, 3, 2, 1, 3, 12, 2, 2, 3, 2, 14, 14, 1, 1, 2, 12, 1, 12, 12, 14, 1, 1, 1, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 1, 2, 13, 13, 17, 2, 1, 1, 1, 2, 1, 15, 2, 17, 2, 2, 1, 1, 2, 1, 14, 2, 1, 2, 1, 1, 3, 2, 3, 1, 2, 3, 2, 1, 2, 17, 1, 2, 2, 1, 2, 2, 3, 15, 1, 3, 13, 3, 1, 1, 2, 3, 14, 1, 13, 2, 2, 1, 14, 2, 1, 2, 2, 1, 17, 12, 12, 1, 1, 2, 1, 1, 1, 2, 2, 13, 13, 1, 17, 1, 2, 17, 1, 1, 1, 1, 1, 1, 2, 14, 1, 1, 3, 13, 12, 12, 2, 1, 1, 2, 2, 3, 3, 1, 3, 1, 3, 1, 1, 2, 2, 13, 1, 2, 2, 12, 12, 2, 2, 12, 17, 1, 17, 2, 12, 17, 17, 1, 17, 12, 17, 1, 17, 2, 1, 13, 2, 2, 1, 1, 2, 2, 1, 1, 17, 2, 3, 17, 3, 1, 1, 1, 2, 1, 12, 3, 2, 2, 12, 17, 17, 12, 12, 13, 2, 3, 17, 2, 1, 3, 2, 2, 3, 17, 17, 12, 2, 3, 2, 2, 2, 3, 1, 3, 2, 1, 12, 2, 2, 3, 2, 3, 1, 2, 2, 17, 12, 7, 12, 12, 1, 1, 1, 2, 2, 1, 1, 1, 12, 13, 17, 17, 1, 17, 1, 1, 1, 1, 13, 1, 2, 17, 2, 1, 2, 2, 2, 2, 2, 12, 3, 1, 1, 2, 2, 2, 2, 1, 1, 13, 14, 1, 1, 2, 2, 2, 13, 17, 2, 1, 2, 15, 2, 2, 1, 2, 13, 1, 2, 1, 2, 17, 2, 1, 2, 2, 1, 6, 13, 1, 1, 1, 1, 2, 1, 1, 2, 1, 17, 2, 2, 13, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 2, 2, 3, 1, 1, 17, 1, 3, 1, 2, 17, 2, 2, 1, 1, 2, 13, 1, 1, 1, 1, 1, 2, 2, 1, 2, 13, 13, 3, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 1, 2, 1, 1, 1, 2, 1, 2, 2, 2, 1, 2, 2, 13, 2, 1, 1, 1, 2, 14, 5, 2, 2, 1, 2, 1, 1, 2, 2, 2, 2, 13, 1, 2, 2, 2, 2, 2, 2, 1, 1, 3, 2, 4, 2, 2, 1, 15, 2, 2, 1, 12, 1, 3, 1, 14, 1, 2, 1, 1, 1, 2, 2, 1, 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 2, 2, 2, 1, 1, 2, 1, 2, 3, 3, 2, 2, 1, 2, 2, 1, 17, 1, 2, 15, 
2, 1, 13, 2, 1, 1, 3, 2, 1, 14, 2, 1, 2, 2, 1, 2, 14, 1, 2, 16, 2, 2, 2, 1, 1, 5, 1, 2, 1, 3, 2, 2, 2, 2, 13, 2, 5, 2, 1, 2, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 2, 3, 2, 13, 3, 2, 2, 2, 2, 1, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 1, 12, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 3, 2, 2, 1, 13, 1, 1, 1, 1, 3, 1, 15, 1, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 13, 2, 14, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 3, 1, 2, 2, 17, 13, 1, 2, 1, 2, 1, 2, 1, 1, 2, 1, 2, 2, 1, 2, 2, 1, 17, 2, 1, 1, 3, 2, 2, 1, 2, 1, 1, 2, 2, 1, 2, 1, 1, 13, 2, 12, 2, 1, 15, 2, 1, 3, 1, 1, 13, 1, 2, 1, 7, 2, 1, 1, 2, 1, 14, 13, 1, 2, 1, 17, 2, 1, 1, 2, 2, 1, 1, 2, 2, 1, 2, 13, 2, 1, 3, 1, 1, 12, 1, 17, 1, 1, 1, 13, 1, 2, 1, 1, 1, 1, 2, 16, 1, 6, 1, 3, 1, 2, 1, 1, 12, 1, 12, 1, 1, 1, 1, 1, 1, 6, 1, 1, 17, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 13, 1, 1, 1, 1, 12, 1, 1, 12, 17, 1, 1, 1, 1, 1, 2, 1, 1, 12, 1, 12, 1, 1, 1, 1, 13, 1, 1, 1, 1, 12, 2, 13, 1, 1, 17, 1, 1, 1, 1, 17, 1, 1, 12, 2, 1, 1, 1, 1, 1, 1, 12, 1, 1, 2, 13, 1, 1, 17, 1, 1, 13, 1, 12, 1, 1, 15, 2, 14, 12, 1, 1, 12, 1, 6, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 15, 1, 1, 12, 1, 1, 2, 14, 1, 1, 1, 1, 12, 1, 13, 15, 13, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 12, 1, 2, 1, 1, 1, 1, 2, 2, 1, 13, 1, 1, 1, 1, 13, 1, 1, 1, 1, 2, 1, 12, 1, 12, 7, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 13, 12, 13, 1, 1, 1, 15, 1, 17, 1, 1, 12, 1, 1, 1, 1, 1, 1, 1, 1, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 13, 1, 17, 1, 1, 1, 15, 12, 1, 14, 1, 1, 14, 1, 1, 1, 1, 1, 1, 1, 1, 13, 1, 13, 1, 7, 1, 12, 14, 1, 13, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 14, 1, 1, 15, 1, 1, 12, 1, 13, 1, 1, 1, 1, 14, 1, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 1, 1, 12, 1, 1, 1, 13, 15, 13, 1, 1, 12, 1, 1, 12, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 1, 17, 16, 1, 1, 1, 1, 1, 1, 1, 12, 14, 1, 13, 1, 12, 1, 14, 1, 1, 1, 1, 1, 1, 14, 2, 1, 1, 1, 1, 1, 1, 1, 1, 12, 12, 1, 1, 1, 1, 1, 1, 14, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 7, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 13, 1, 14, 1, 13, 1, 1, 12, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 13, 13, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 1, 17, 6, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 13, 1, 1, 13, 12, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 12, 1, 1, 1, 1, 1, 1, 12, 1, 12, 1, 1, 1, 1, 15, 1, 6, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 6, 1, 1, 2, 1, 2, 7, 1, 17, 2, 1, 1, 1, 1, 1, 2, 12, 2, 2, 2, 1, 14, 2, 17, 15, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 15, 17, 7, 2, 17, 17, 2, 1, 3, 1, 2, 2, 17, 13, 14, 1, 2, 1, 1, 2, 14, 2, 13, 1, 1, 1, 15, 7, 1, 2, 1, 1, 2, 1, 3, 1, 1, 17, 1, 13, 17, 13, 17, 3, 1, 1, 14, 2, 2, 2, 2, 14, 2, 2, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 17, 1, 1, 17, 1, 2, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 2, 3, 1, 17, 1, 1, 13, 17, 1, 2, 1, 2, 2, 1, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 13, 1, 12, 1, 17, 1, 2, 1, 3, 1, 1, 2, 2, 1, 1, 2, 1, 17, 14, 1, 2, 2, 2, 1, 17, 17, 13, 1, 1, 1, 2, 17, 2, 2, 2, 1, 2, 2, 17, 1, 2, 1, 2, 1, 2, 2, 1, 2, 1, 1, 2, 1, 1, 2, 2, 1, 3, 1, 1, 14, 1, 1, 1, 14, 1, 1, 1, 1, 1, 13, 17, 2, 1, 2, 2, 1, 2, 17, 1, 1, 1, 1, 14, 1, 2, 1, 1, 17, 2, 1, 1, 1, 1, 17, 1, 1, 1, 2, 1, 2, 1, 2, 2, 2, 7, 1, 1, 17, 17, 2, 2, 1, 13, 2, 1, 1, 1, 1, 1, 1, 7, 17, 1, 2, 1, 1, 1, 2, 1, 1, 2, 1, 1, 1, 2, 1, 1, 13, 1, 1, 2, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 2, 1, 12, 1, 3, 2, 2, 2, 1, 3, 2, 15, 1, 2, 1, 3, 13, 1, 1, 2, 1, 1, 1, 7, 7, 1, 2, 1, 1, 1, 1, 1, 15, 14, 1, 2, 17, 1, 3, 2, 12, 13, 1, 13, 17, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 7, 17, 1, 2, 15, 1, 2, 7, 1, 1, 2, 1, 17, 1, 1, 17, 2, 13, 1, 1, 1, 2, 2, 13, 1, 1, 1, 1, 1, 17, 1, 2, 2, 14, 1, 13, 1, 1, 2, 1, 1, 1, 13, 1, 2, 1, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 2, 2, 1, 2, 3, 
1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 17, 1, 2, 1, 1, 3, 1, 1, 2, 2, 3, 1, 1, 2, 2, 1, 2, 1, 2, 2, 3, 1, 2, 1, 3, 2, 2, 1, 1, 1, 13, 1, 1, 1, 1, 2, 1, 1, 1, 3, 3, 1, 1, 1, 2, 1, 1, 2, 2, 2, 2, 1, 1, 13, 1, 1, 7, 2, 2, 1, 2, 13, 2, 1, 1, 2, 1, 1, 15, 1, 2, 12, 2, 1, 13, 1, 1, 13, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 13, 2, 1, 1, 1, 2, 1, 2, 1, 13, 13, 2, 1, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 13, 2, 1, 1, 2, 15, 1, 2, 1, 1, 2, 1, 1, 13, 1, 1, 2, 13, 1, 1, 2, 1, 1, 2, 1, 1, 1, 2, 1, 13, 2, 2, 2, 15, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 13, 2, 1, 1, 7, 2, 2, 1, 3, 2, 2, 13, 1, 13, 1, 15, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 2, 2, 2, 1, 2, 15, 2, 1, 1, 2, 1, 1, 1, 2, 1, 2, 3, 1, 13, 2, 2, 15, 2, 13, 1, 13, 2, 2, 2, 3, 1, 2, 1, 15, 2, 13, 1, 2, 1, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 7, 2, 2, 1, 1, 2, 15, 3, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 2, 2, 17, 2, 2, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 2, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 3, 13, 1, 1, 1, 1, 1, 2, 1, 1, 13, 2, 1, 1, 2, 1, 1, 1, 15, 1, 1, 2, 1, 1, 2, 1, 14, 1, 14, 2, 13, 1, 2, 2, 2, 2, 2, 1, 2, 13, 1, 2, 1, 2, 1, 1, 1, 2, 1, 2, 14, 1, 1, 1, 2, 3, 1, 2, 1, 2, 2, 1, 1, 1, 1, 2, 1, 12, 1, 13, 12, 3, 14, 1, 1, 1, 1, 1, 2, 14, 1, 1, 1, 1, 2, 1, 1, 2, 1, 2, 1, 1, 3, 2, 1, 1, 1, 1, 17, 2, 2, 1, 2, 1, 1, 1, 2, 3, 3, 3, 1, 2, 2, 2, 3, 2, 3, 1, 1, 1, 2, 1, 1, 1, 2, 1, 2, 2, 1, 1, 1, 2, 1, 3, 2, 1, 1, 2, 1, 1, 1, 1, 3, 1, 1, 2, 3, 2, 3, 3, 2, 2, 1, 1, 2, 2, 2, 1, 1, 1, 2, 2, 1, 2, 1, 13, 1, 1, 3, 3, 2, 3, 12, 13, 1, 2, 1, 1, 13, 1, 13, 14, 2, 1, 2, 3, 3, 1, 1, 1, 1, 1, 13, 1, 1, 2, 1, 1, 1, 1, 1, 13, 1, 1, 15, 13, 1, 2, 1, 2, 1, 1, 2, 1, 1, 1, 1, 13, 2, 1, 1, 2, 2, 1, 13, 14, 1, 1, 2, 1, 1, 13, 2, 12, 13, 13, 2, 13, 1, 1, 1, 13, 1, 2, 1, 1, 1, 1, 2, 1, 13, 2, 1, 1, 1, 13, 13, 1, 2, 17, 2, 1, 1, 1, 1, 14, 1, 1, 2, 1, 12, 2, 1, 2, 1, 17, 1, 1, 1, 13, 1, 2, 17, 1, 2, 13, 1, 1, 14, 17, 1, 13, 2, 1, 2, 1, 1, 1, 12, 1, 17, 3, 2, 5, 13, 1, 1, 1, 7, 13, 1, 1, 2, 2, 1, 1, 2, 1, 1, 
1, 2, 1, 1, 1, 1, 2, 2, 2, 1, 2, 2, 1, 13, 2, 1, 1, 14, 1, 2, 2, 1, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 17, 1, 1, 1, 2, 1, 13, 1, 2, 1, 17, 14, 1, 1, 1, 2, 1, 12, 1, 13, 1, 2, 1, 1, 1, 2, 13, 1, 3, 12, 17, 1, 2, 1, 1, 1, 12, 2, 12, 13, 1, 17, 2, 2, 2, 1, 1, 1, 7, 1, 1, 2, 2, 1, 1, 2, 2, 1, 2, 13, 1, 1, 1, 15, 2, 17, 1, 2, 13, 1, 1, 13, 17, 1, 1, 1, 2, 14, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 14, 1, 17, 1, 2, 2, 2, 2, 14, 17, 17, 17, 17, 2, 17, 1, 1, 2, 2, 13, 12, 1, 13, 1, 1, 2, 1, 2, 13, 12, 2, 1, 1, 17, 1, 1, 1, 1, 3, 1, 17, 1, 1, 2, 1, 2, 1, 1, 17, 2, 1, 17, 14, 17, 1, 2, 1, 14, 1, 3, 1, 13, 2, 1, 1, 2, 17, 1, 13, 1, 1, 17, 1, 1, 1, 13, 2, 14, 17, 1, 2, 17, 2, 2, 17, 17, 1, 17, 17, 14, 17, 1, 1, 2, 14, 17, 1, 2, 2, 17, 1, 17, 1, 17, 2, 17, 17, 13, 17, 17, 1, 2, 1, 17, 1, 13, 17, 13, 17, 2, 13, 17, 17, 1, 17, 17, 2, 1, 13, 1, 2, 17, 14, 2, 1, 1, 2, 17, 1, 2, 2, 17, 12, 17, 2, 17, 13, 1, 2, 13, 2, 14, 2, 3, 17, 15, 14, 1, 12, 1, 2, 1, 2, 1, 17, 2, 13, 13, 1, 1, 17, 2, 1, 1, 1, 1, 2, 12, 14, 14, 14, 1, 2, 17, 13, 15, 1, 1, 13, 1, 14, 14, 2, 14, 1, 2, 1, 2, 2, 2, 1, 1, 16, 17, 1, 2, 12, 15, 1, 1, 1, 2, 2, 17, 2, 13, 17, 17, 1, 17, 17, 17, 12, 1, 17, 1, 7, 17, 17, 2, 2, 1, 1, 17, 17, 1, 2, 2, 17, 2, 17, 2, 14, 17, 1, 12, 17, 7, 17, 2, 13, 13, 1, 1, 17, 1, 17, 17, 17, 1, 7, 17, 1, 13, 1, 2, 2, 13, 13, 1, 12, 1, 1, 12, 12, 12, 12, 17, 1, 12, 2, 3, 14, 14, 2, 2, 15, 1, 1, 17, 2, 13, 13, 2, 14, 2, 5, 2, 1, 1, 14, 2, 1, 1, 13, 2, 3, 1, 1, 12, 1, 13, 1, 1, 2, 1, 13, 1, 2, 1, 12, 12, 1, 1, 2, 2, 1, 2, 2, 14, 14, 2, 2, 2, 2, 1, 2, 2, 15, 13, 6, 1, 1, 1, 1, 12, 2, 12, 14, 1, 1, 1, 1, 12, 1, 2, 2, 1, 12, 1, 1, 2, 1, 1, 1, 12, 1, 1, 12, 15, 1, 14, 1, 1, 1, 2, 13, 14, 1, 1, 6, 1, 12, 1, 1, 15, 1, 2, 2, 12, 12, 1, 1, 2, 2, 12, 1, 1, 1, 1, 1, 13, 1, 1, 12, 1, 1, 2, 2, 17, 12, 2, 15, 1, 1, 14, 2, 1, 17, 1, 13, 1, 12, 12, 2, 1, 1, 1, 1, 1, 15, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 1, 13, 1, 1, 1, 12, 1, 15, 2, 12, 1, 15, 1, 1, 1, 1, 1, 2, 2, 2, 14, 1, 2, 3, 1, 2, 1, 2, 2, 1, 1, 2, 1, 1, 
1, 1, 1, 1, 3, 1, 2, 2, 1, 1, 1, 2, 15, 1, 2, 15, 2, 12, 2, 13, 1, 1, 17, 13, 1, 12, 2, 1, 1, 1, 1, 1, 14, 1, 12, 2, 1, 1, 5, 2, 2, 13, 13, 14, 1, 15, 2, 2, 2, 13, 2, 1, 2, 5, 1, 1, 1, 2, 1, 1, 1, 1, 2, 2, 1, 3, 1, 1, 2, 1, 2, 2, 15, 12, 1, 1, 1, 1, 17, 1, 2, 2, 3, 1, 1, 2, 2, 2, 17, 1, 1, 1, 1, 12, 1, 2, 3, 17, 1, 1, 1, 3, 1, 15, 1, 12, 12, 2, 2, 2, 1, 1, 2, 1, 1, 1, 14, 2, 2, 15, 1, 2, 2, 1, 1, 15, 1, 1, 1, 1, 1, 2, 1, 1, 17, 2, 1, 12, 1, 2, 1, 1, 3, 1, 1, 1, 14, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 17, 1, 1, 1, 1, 1, 12, 12, 2, 1, 1, 1, 1, 13, 2, 13, 2, 1, 2, 1, 2, 1, 1, 6, 1, 2, 1, 1, 14, 2, 15, 1, 6, 1, 2, 2, 1, 6, 1, 1, }; + var feature = new double[] { 0, 0.693147180559945, 0, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.09861228866811, 0, 0, 0, 1.09861228866811, 0, 0, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 2.07944154167984, 0, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 1.6094379124341, 0, 1.79175946922805, 1.38629436111989, 1.6094379124341, 0, 0, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 0, 0, 1.09861228866811, 
1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 0, 2.19722457733622, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.79175946922805, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0, 0, 0, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 0, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0, 0, 1.6094379124341, 1.38629436111989, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.94591014905531, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0, 2.56494935746154, 0.693147180559945, 0, 0, 0, 0, 0, 1.6094379124341, 0, 0, 0, 0, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 
0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0, 2.56494935746154, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 2.07944154167984, 2.30258509299405, 1.38629436111989, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0, 0, 0, 0, 0, 1.09861228866811, 1.09861228866811, 0, 2.19722457733622, 2.30258509299405, 2.70805020110221, 2.07944154167984, 1.94591014905531, 2.39789527279837, 2.30258509299405, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 2.77258872223978, 2.19722457733622, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.09861228866811, 2.19722457733622, 2.30258509299405, 1.79175946922805, 2.07944154167984, 1.94591014905531, 1.38629436111989, 2.19722457733622, 3.04452243772342, 2.07944154167984, 2.19722457733622, 2.07944154167984, 2.484906649788, 0.693147180559945, 1.94591014905531, 1.6094379124341, 2.56494935746154, 1.38629436111989, 2.63905732961526, 1.94591014905531, 2.19722457733622, 1.94591014905531, 1.09861228866811, 1.38629436111989, 1.79175946922805, 2.99573227355399, 0, 2.484906649788, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.19722457733622, 2.56494935746154, 1.79175946922805, 2.83321334405622, 0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.79175946922805, 2.30258509299405, 2.56494935746154, 2.39789527279837, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.94591014905531, 2.77258872223978, 2.484906649788, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 2.484906649788, 0, 
0.693147180559945, 0.693147180559945, 2.30258509299405, 1.94591014905531, 2.56494935746154, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.09861228866811, 0, 1.09861228866811, 2.19722457733622, 2.07944154167984, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.94591014905531, 1.6094379124341, 0.693147180559945, 2.77258872223978, 2.484906649788, 1.38629436111989, 2.39789527279837, 2.39789527279837, 2.19722457733622, 0.693147180559945, 1.6094379124341, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.19722457733622, 2.484906649788, 1.38629436111989, 2.07944154167984, 1.38629436111989, 1.38629436111989, 2.89037175789616, 1.6094379124341, 2.56494935746154, 1.38629436111989, 2.19722457733622, 1.38629436111989, 2.30258509299405, 2.07944154167984, 1.79175946922805, 2.56494935746154, 1.38629436111989, 2.77258872223978, 1.79175946922805, 2.39789527279837, 1.79175946922805, 1.09861228866811, 1.94591014905531, 1.79175946922805, 2.19722457733622, 1.6094379124341, 2.56494935746154, 1.38629436111989, 0.693147180559945, 2.07944154167984, 1.94591014905531, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.39789527279837, 2.39789527279837, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0, 1.79175946922805, 2.83321334405622, 1.79175946922805, 1.79175946922805, 0, 2.30258509299405, 1.38629436111989, 2.77258872223978, 1.94591014905531, 1.09861228866811, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.6094379124341, 2.30258509299405, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.79175946922805, 2.07944154167984, 2.39789527279837, 1.79175946922805, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 1.79175946922805, 2.30258509299405, 1.94591014905531, 1.09861228866811, 3.2188758248682, 1.94591014905531, 1.79175946922805, 0, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.94591014905531, 1.38629436111989, 2.07944154167984, 1.94591014905531, 
1.38629436111989, 1.09861228866811, 1.09861228866811, 2.56494935746154, 1.6094379124341, 1.09861228866811, 1.94591014905531, 2.07944154167984, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.83321334405622, 1.94591014905531, 2.39789527279837, 2.30258509299405, 2.39789527279837, 1.94591014905531, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.09861228866811, 0.693147180559945, 1.6094379124341, 2.63905732961526, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.94591014905531, 2.30258509299405, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.99573227355399, 2.07944154167984, 2.77258872223978, 1.6094379124341, 0, 2.07944154167984, 1.09861228866811, 1.79175946922805, 2.39789527279837, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.19722457733622, 2.30258509299405, 2.484906649788, 1.94591014905531, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 2.63905732961526, 1.6094379124341, 1.94591014905531, 1.6094379124341, 2.63905732961526, 2.56494935746154, 1.94591014905531, 2.484906649788, 1.79175946922805, 2.07944154167984, 2.07944154167984, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.94591014905531, 0, 1.38629436111989, 2.484906649788, 2.39789527279837, 1.94591014905531, 2.39789527279837, 2.07944154167984, 1.6094379124341, 2.63905732961526, 1.79175946922805, 2.63905732961526, 2.39789527279837, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 1.79175946922805, 1.79175946922805, 2.63905732961526, 2.19722457733622, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.39789527279837, 1.38629436111989, 2.19722457733622, 1.79175946922805, 2.19722457733622, 1.09861228866811, 1.09861228866811, 2.19722457733622, 2.39789527279837, 1.09861228866811, 1.38629436111989, 1.79175946922805, 2.63905732961526, 0, 1.6094379124341, 1.79175946922805, 0, 2.63905732961526, 2.07944154167984, 1.38629436111989, 1.79175946922805, 2.63905732961526, 0.693147180559945, 1.79175946922805, 
1.6094379124341, 2.19722457733622, 0, 2.39789527279837, 0.693147180559945, 1.79175946922805, 2.39789527279837, 2.63905732961526, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.70805020110221, 1.09861228866811, 1.79175946922805, 1.09861228866811, 2.56494935746154, 2.19722457733622, 1.6094379124341, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 2.56494935746154, 1.38629436111989, 1.38629436111989, 2.484906649788, 2.30258509299405, 2.30258509299405, 0, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.30258509299405, 1.94591014905531, 1.79175946922805, 0, 2.77258872223978, 2.56494935746154, 2.30258509299405, 2.19722457733622, 2.30258509299405, 1.6094379124341, 2.30258509299405, 1.79175946922805, 2.19722457733622, 0.693147180559945, 1.79175946922805, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.09861228866811, 2.07944154167984, 1.09861228866811, 2.30258509299405, 0.693147180559945, 2.83321334405622, 2.83321334405622, 2.83321334405622, 2.07944154167984, 2.77258872223978, 2.30258509299405, 1.38629436111989, 0.693147180559945, 2.56494935746154, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.79175946922805, 3.09104245335832, 2.30258509299405, 2.56494935746154, 1.38629436111989, 2.07944154167984, 2.19722457733622, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.89037175789616, 2.19722457733622, 1.79175946922805, 2.07944154167984, 1.09861228866811, 0, 0, 0, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.70805020110221, 2.83321334405622, 1.79175946922805, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 2.39789527279837, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.07944154167984, 2.56494935746154, 2.07944154167984, 
1.38629436111989, 2.70805020110221, 1.6094379124341, 2.484906649788, 1.79175946922805, 1.94591014905531, 2.70805020110221, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 0, 0, 1.38629436111989, 2.63905732961526, 1.79175946922805, 0.693147180559945, 1.94591014905531, 1.79175946922805, 2.07944154167984, 2.56494935746154, 0.693147180559945, 2.70805020110221, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.6094379124341, 0, 1.38629436111989, 2.30258509299405, 1.79175946922805, 0, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.484906649788, 2.30258509299405, 1.6094379124341, 1.79175946922805, 3.17805383034795, 2.70805020110221, 1.6094379124341, 2.19722457733622, 0.693147180559945, 2.70805020110221, 2.07944154167984, 2.63905732961526, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.56494935746154, 2.07944154167984, 1.94591014905531, 2.39789527279837, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0, 1.09861228866811, 2.484906649788, 2.19722457733622, 0, 1.94591014905531, 2.07944154167984, 0, 0, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.38629436111989, 2.39789527279837, 2.56494935746154, 0.693147180559945, 2.30258509299405, 1.94591014905531, 2.07944154167984, 2.30258509299405, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.6094379124341, 0.693147180559945, 0.693147180559945, 2.83321334405622, 2.70805020110221, 0.693147180559945, 1.94591014905531, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0, 2.484906649788, 1.38629436111989, 1.79175946922805, 2.484906649788, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.77258872223978, 2.63905732961526, 1.38629436111989, 1.6094379124341, 
1.38629436111989, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.89037175789616, 0, 0, 2.39789527279837, 1.38629436111989, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.39789527279837, 2.56494935746154, 1.6094379124341, 2.56494935746154, 2.63905732961526, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.6094379124341, 0, 0, 0, 0, 0.693147180559945, 1.94591014905531, 2.39789527279837, 0.693147180559945, 1.6094379124341, 1.94591014905531, 2.56494935746154, 2.39789527279837, 2.63905732961526, 0, 1.09861228866811, 0, 0, 2.07944154167984, 1.79175946922805, 2.63905732961526, 0.693147180559945, 1.09861228866811, 2.39789527279837, 0.693147180559945, 2.63905732961526, 2.07944154167984, 1.79175946922805, 1.38629436111989, 0, 1.79175946922805, 2.19722457733622, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.6094379124341, 2.39789527279837, 2.30258509299405, 1.09861228866811, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.6094379124341, 0.693147180559945, 2.30258509299405, 0, 1.94591014905531, 2.30258509299405, 1.94591014905531, 0.693147180559945, 2.77258872223978, 0, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 2.77258872223978, 2.63905732961526, 2.63905732961526, 1.6094379124341, 2.19722457733622, 2.484906649788, 2.484906649788, 1.79175946922805, 0.693147180559945, 2.484906649788, 2.30258509299405, 2.39789527279837, 2.484906649788, 1.94591014905531, 2.30258509299405, 1.6094379124341, 1.94591014905531, 1.09861228866811, 2.56494935746154, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 1.79175946922805, 0, 0, 0, 1.79175946922805, 0, 1.6094379124341, 0, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0, 2.56494935746154, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.484906649788, 
0, 1.38629436111989, 1.94591014905531, 2.19722457733622, 2.19722457733622, 1.79175946922805, 1.79175946922805, 2.484906649788, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.94591014905531, 2.19722457733622, 1.94591014905531, 2.19722457733622, 3.04452243772342, 0.693147180559945, 2.30258509299405, 2.56494935746154, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.09861228866811, 2.94443897916644, 1.79175946922805, 2.83321334405622, 2.07944154167984, 1.38629436111989, 1.38629436111989, 2.19722457733622, 2.30258509299405, 2.30258509299405, 1.94591014905531, 1.94591014905531, 2.99573227355399, 2.30258509299405, 2.30258509299405, 2.30258509299405, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.39789527279837, 2.83321334405622, 1.09861228866811, 0, 2.30258509299405, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.30258509299405, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.94591014905531, 2.39789527279837, 1.94591014905531, 3.04452243772342, 1.94591014905531, 2.484906649788, 0.693147180559945, 2.07944154167984, 1.6094379124341, 2.77258872223978, 0, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0, 2.63905732961526, 1.38629436111989, 0, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.94591014905531, 2.484906649788, 1.79175946922805, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.99573227355399, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.07944154167984, 2.39789527279837, 0.693147180559945, 2.19722457733622, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 3.04452243772342, 1.94591014905531, 0.693147180559945, 2.19722457733622, 0, 0, 0.693147180559945, 0, 2.56494935746154, 0.693147180559945, 
1.79175946922805, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.39789527279837, 1.38629436111989, 2.07944154167984, 2.77258872223978, 1.6094379124341, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.09861228866811, 2.30258509299405, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.6094379124341, 2.39789527279837, 2.07944154167984, 1.79175946922805, 2.63905732961526, 1.6094379124341, 2.94443897916644, 1.38629436111989, 2.19722457733622, 2.484906649788, 1.09861228866811, 1.38629436111989, 1.38629436111989, 2.89037175789616, 1.94591014905531, 1.09861228866811, 1.09861228866811, 2.94443897916644, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0.693147180559945, 2.39789527279837, 1.38629436111989, 2.94443897916644, 1.38629436111989, 1.38629436111989, 1.6094379124341, 2.56494935746154, 3.04452243772342, 2.39789527279837, 2.30258509299405, 1.94591014905531, 2.19722457733622, 1.38629436111989, 1.38629436111989, 0, 1.38629436111989, 1.6094379124341, 2.07944154167984, 2.07944154167984, 2.30258509299405, 1.38629436111989, 1.09861228866811, 2.77258872223978, 1.38629436111989, 3.04452243772342, 2.39789527279837, 1.38629436111989, 0.693147180559945, 1.94591014905531, 2.30258509299405, 2.63905732961526, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.94591014905531, 1.38629436111989, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.6094379124341, 1.6094379124341, 0.693147180559945, 2.77258872223978, 0, 2.19722457733622, 2.484906649788, 1.09861228866811, 2.56494935746154, 0.693147180559945, 1.6094379124341, 2.39789527279837, 0.693147180559945, 2.70805020110221, 1.09861228866811, 1.6094379124341, 2.30258509299405, 2.07944154167984, 0.693147180559945, 2.83321334405622, 1.94591014905531, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 1.94591014905531, 
1.6094379124341, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.6094379124341, 2.19722457733622, 2.63905732961526, 1.6094379124341, 2.19722457733622, 1.94591014905531, 2.484906649788, 2.70805020110221, 1.09861228866811, 2.07944154167984, 2.39789527279837, 2.484906649788, 2.39789527279837, 1.38629436111989, 2.19722457733622, 2.07944154167984, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.83321334405622, 1.09861228866811, 2.94443897916644, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.6094379124341, 3.09104245335832, 1.79175946922805, 0.693147180559945, 1.09861228866811, 2.94443897916644, 0.693147180559945, 0.693147180559945, 2.39789527279837, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.94591014905531, 0.693147180559945, 2.07944154167984, 1.94591014905531, 0, 2.07944154167984, 1.6094379124341, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.94591014905531, 0, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.07944154167984, 2.07944154167984, 0, 2.07944154167984, 2.07944154167984, 1.79175946922805, 2.30258509299405, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.07944154167984, 2.83321334405622, 2.07944154167984, 2.07944154167984, 2.484906649788, 1.6094379124341, 1.09861228866811, 2.30258509299405, 2.30258509299405, 2.70805020110221, 2.19722457733622, 1.79175946922805, 2.19722457733622, 1.94591014905531, 2.39789527279837, 2.39789527279837, 2.07944154167984, 1.79175946922805, 2.63905732961526, 1.6094379124341, 2.30258509299405, 1.6094379124341, 0.693147180559945, 2.30258509299405, 2.30258509299405, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.07944154167984, 2.63905732961526, 2.07944154167984, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.07944154167984, 0.693147180559945, 1.38629436111989, 1.09861228866811, 2.77258872223978, 2.39789527279837, 1.6094379124341, 0.693147180559945, 
1.6094379124341, 0, 2.99573227355399, 0.693147180559945, 0.693147180559945, 1.79175946922805, 2.484906649788, 1.38629436111989, 0, 2.07944154167984, 0, 1.09861228866811, 1.09861228866811, 2.19722457733622, 1.94591014905531, 1.94591014905531, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.94591014905531, 2.30258509299405, 2.07944154167984, 1.6094379124341, 1.94591014905531, 2.07944154167984, 2.07944154167984, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.484906649788, 1.09861228866811, 1.6094379124341, 1.94591014905531, 2.70805020110221, 1.09861228866811, 2.63905732961526, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 2.19722457733622, 2.07944154167984, 1.09861228866811, 1.94591014905531, 2.39789527279837, 2.99573227355399, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.70805020110221, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 2.63905732961526, 2.63905732961526, 1.94591014905531, 1.79175946922805, 2.07944154167984, 1.79175946922805, 2.07944154167984, 2.484906649788, 1.94591014905531, 1.6094379124341, 2.07944154167984, 1.38629436111989, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.09861228866811, 1.6094379124341, 2.94443897916644, 1.79175946922805, 0, 0.693147180559945, 0, 2.19722457733622, 0.693147180559945, 1.94591014905531, 2.07944154167984, 1.79175946922805, 1.38629436111989, 2.19722457733622, 1.09861228866811, 1.94591014905531, 2.19722457733622, 1.09861228866811, 2.07944154167984, 1.6094379124341, 2.19722457733622, 1.94591014905531, 0.693147180559945, 1.94591014905531, 2.19722457733622, 3.04452243772342, 1.6094379124341, 2.39789527279837, 0, 2.77258872223978, 
1.6094379124341, 2.30258509299405, 1.38629436111989, 1.6094379124341, 2.07944154167984, 2.30258509299405, 1.38629436111989, 0.693147180559945, 2.39789527279837, 0, 1.94591014905531, 2.19722457733622, 1.09861228866811, 1.79175946922805, 2.07944154167984, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0, 2.19722457733622, 1.09861228866811, 2.30258509299405, 2.07944154167984, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.19722457733622, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.6094379124341, 2.30258509299405, 2.07944154167984, 1.09861228866811, 2.70805020110221, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 2.07944154167984, 1.79175946922805, 2.39789527279837, 1.94591014905531, 1.79175946922805, 2.39789527279837, 1.6094379124341, 2.39789527279837, 2.07944154167984, 1.09861228866811, 1.6094379124341, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.09861228866811, 2.19722457733622, 1.6094379124341, 2.07944154167984, 0, 1.6094379124341, 2.89037175789616, 1.38629436111989, 2.99573227355399, 2.30258509299405, 1.94591014905531, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.94591014905531, 2.30258509299405, 2.56494935746154, 2.77258872223978, 0.693147180559945, 2.99573227355399, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.6094379124341, 2.63905732961526, 0, 3.36729582998647, 1.6094379124341, 2.07944154167984, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.63905732961526, 0.693147180559945, 1.94591014905531, 2.56494935746154, 1.79175946922805, 2.30258509299405, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.94591014905531, 2.19722457733622, 2.70805020110221, 2.07944154167984, 2.70805020110221, 0, 1.38629436111989, 2.07944154167984, 1.79175946922805, 2.19722457733622, 1.94591014905531, 
2.30258509299405, 2.30258509299405, 0.693147180559945, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.6094379124341, 2.07944154167984, 2.30258509299405, 2.07944154167984, 2.77258872223978, 1.79175946922805, 1.09861228866811, 1.79175946922805, 2.56494935746154, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.79175946922805, 2.30258509299405, 2.07944154167984, 2.07944154167984, 1.94591014905531, 0, 2.19722457733622, 1.79175946922805, 1.79175946922805, 1.94591014905531, 2.39789527279837, 2.30258509299405, 1.6094379124341, 2.484906649788, 2.39789527279837, 1.79175946922805, 2.77258872223978, 2.56494935746154, 1.09861228866811, 1.38629436111989, 2.484906649788, 2.19722457733622, 1.6094379124341, 1.09861228866811, 2.39789527279837, 1.09861228866811, 2.39789527279837, 1.94591014905531, 3.29583686600433, 1.38629436111989, 2.56494935746154, 0.693147180559945, 1.38629436111989, 2.19722457733622, 2.56494935746154, 2.19722457733622, 1.6094379124341, 2.19722457733622, 1.38629436111989, 1.94591014905531, 2.77258872223978, 1.38629436111989, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.30258509299405, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.09861228866811, 2.07944154167984, 2.70805020110221, 2.07944154167984, 1.79175946922805, 2.19722457733622, 2.63905732961526, 0.693147180559945, 2.19722457733622, 0.693147180559945, 2.19722457733622, 0.693147180559945, 2.07944154167984, 1.94591014905531, 2.70805020110221, 1.6094379124341, 1.6094379124341, 1.79175946922805, 2.39789527279837, 2.70805020110221, 1.09861228866811, 2.484906649788, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 1.6094379124341, 0, 2.63905732961526, 2.30258509299405, 1.09861228866811, 1.94591014905531, 0, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.79175946922805, 1.6094379124341, 2.07944154167984, 1.38629436111989, 2.39789527279837, 1.94591014905531, 2.07944154167984, 1.94591014905531, 
1.38629436111989, 1.79175946922805, 1.09861228866811, 2.70805020110221, 2.484906649788, 1.94591014905531, 1.6094379124341, 1.38629436111989, 2.19722457733622, 1.6094379124341, 2.77258872223978, 2.77258872223978, 2.39789527279837, 2.39789527279837, 0.693147180559945, 2.484906649788, 2.484906649788, 1.6094379124341, 2.39789527279837, 1.09861228866811, 0.693147180559945, 2.19722457733622, 2.39789527279837, 2.484906649788, 2.19722457733622, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.07944154167984, 2.30258509299405, 2.30258509299405, 2.19722457733622, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 3.09104245335832, 2.70805020110221, 2.19722457733622, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.56494935746154, 1.09861228866811, 1.94591014905531, 2.70805020110221, 2.484906649788, 1.38629436111989, 2.19722457733622, 1.6094379124341, 2.484906649788, 1.38629436111989, 2.30258509299405, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.79175946922805, 2.19722457733622, 1.38629436111989, 0.693147180559945, 2.39789527279837, 0, 2.07944154167984, 2.94443897916644, 2.89037175789616, 2.30258509299405, 1.38629436111989, 1.6094379124341, 1.6094379124341, 2.39789527279837, 2.63905732961526, 2.484906649788, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.6094379124341, 2.63905732961526, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 2.19722457733622, 1.94591014905531, 1.6094379124341, 2.56494935746154, 0.693147180559945, 2.30258509299405, 0.693147180559945, 2.56494935746154, 2.56494935746154, 2.30258509299405, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.77258872223978, 0, 0.693147180559945, 2.07944154167984, 2.30258509299405, 2.07944154167984, 1.09861228866811, 
2.30258509299405, 2.77258872223978, 2.07944154167984, 1.79175946922805, 1.38629436111989, 1.94591014905531, 2.89037175789616, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.6094379124341, 2.70805020110221, 1.38629436111989, 2.39789527279837, 2.30258509299405, 1.79175946922805, 1.6094379124341, 2.07944154167984, 2.77258872223978, 1.79175946922805, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.56494935746154, 2.89037175789616, 1.09861228866811, 2.63905732961526, 2.56494935746154, 1.09861228866811, 2.30258509299405, 1.94591014905531, 2.39789527279837, 2.07944154167984, 1.79175946922805, 2.63905732961526, 3.29583686600433, 1.6094379124341, 2.19722457733622, 2.30258509299405, 1.38629436111989, 1.38629436111989, 2.484906649788, 2.07944154167984, 1.79175946922805, 1.09861228866811, 2.07944154167984, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.83321334405622, 1.09861228866811, 1.6094379124341, 2.07944154167984, 2.39789527279837, 2.30258509299405, 0, 1.94591014905531, 0.693147180559945, 2.484906649788, 2.39789527279837, 0, 0, 2.07944154167984, 1.09861228866811, 2.07944154167984, 2.63905732961526, 2.484906649788, 2.07944154167984, 1.38629436111989, 2.30258509299405, 2.07944154167984, 2.30258509299405, 3.09104245335832, 2.63905732961526, 1.09861228866811, 0, 2.94443897916644, 1.79175946922805, 1.94591014905531, 2.30258509299405, 2.30258509299405, 2.07944154167984, 2.39789527279837, 2.484906649788, 1.6094379124341, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0.693147180559945, 1.38629436111989, 2.19722457733622, 0, 2.19722457733622, 2.484906649788, 0.693147180559945, 1.79175946922805, 0.693147180559945, 2.30258509299405, 1.38629436111989, 1.94591014905531, 0, 0, 1.79175946922805, 0.693147180559945, 1.79175946922805, 0, 1.94591014905531, 1.6094379124341, 2.56494935746154, 2.07944154167984, 2.07944154167984, 0, 1.94591014905531, 2.39789527279837, 1.38629436111989, 
1.38629436111989, 2.39789527279837, 2.39789527279837, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 2.07944154167984, 1.38629436111989, 2.77258872223978, 1.38629436111989, 2.39789527279837, 2.70805020110221, 1.38629436111989, 1.94591014905531, 1.94591014905531, 1.6094379124341, 2.19722457733622, 2.19722457733622, 2.07944154167984, 1.38629436111989, 0.693147180559945, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.70805020110221, 2.39789527279837, 2.63905732961526, 2.19722457733622, 1.09861228866811, 1.6094379124341, 1.94591014905531, 1.09861228866811, 2.19722457733622, 2.63905732961526, 2.56494935746154, 2.39789527279837, 0.693147180559945, 2.07944154167984, 2.19722457733622, 1.94591014905531, 2.07944154167984, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 2.63905732961526, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.94591014905531, 2.70805020110221, 0, 1.38629436111989, 2.77258872223978, 1.79175946922805, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.09861228866811, 1.79175946922805, 1.09861228866811, 2.30258509299405, 2.89037175789616, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.09861228866811, 2.39789527279837, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.09861228866811, 2.70805020110221, 1.94591014905531, 2.07944154167984, 1.6094379124341, 0, 1.79175946922805, 2.19722457733622, 1.94591014905531, 0.693147180559945, 0, 2.30258509299405, 2.30258509299405, 2.07944154167984, 1.79175946922805, 2.70805020110221, 1.38629436111989, 2.63905732961526, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.19722457733622, 2.19722457733622, 0, 1.38629436111989, 1.79175946922805, 2.19722457733622, 0.693147180559945, 1.94591014905531, 1.79175946922805, 2.19722457733622, 1.6094379124341, 1.6094379124341, 
2.30258509299405, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.94591014905531, 0, 0, 2.89037175789616, 0, 0, 1.6094379124341, 3.09104245335832, 1.38629436111989, 2.30258509299405, 2.63905732961526, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0, 0, 1.09861228866811, 2.39789527279837, 0.693147180559945, 0, 1.94591014905531, 2.39789527279837, 1.38629436111989, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 2.70805020110221, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.6094379124341, 0, 2.77258872223978, 2.99573227355399, 0, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.94591014905531, 1.09861228866811, 1.79175946922805, 0.693147180559945, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.19722457733622, 0, 1.09861228866811, 1.6094379124341, 1.6094379124341, 0, 2.83321334405622, 2.39789527279837, 1.79175946922805, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.38629436111989, 2.63905732961526, 2.07944154167984, 0.693147180559945, 0, 1.94591014905531, 2.39789527279837, 1.38629436111989, 0, 1.94591014905531, 2.07944154167984, 2.07944154167984, 1.6094379124341, 2.19722457733622, 2.39789527279837, 1.79175946922805, 0, 1.79175946922805, 2.484906649788, 2.63905732961526, 2.19722457733622, 2.07944154167984, 2.07944154167984, 2.30258509299405, 2.77258872223978, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.09861228866811, 2.19722457733622, 2.56494935746154, 1.79175946922805, 0.693147180559945, 1.94591014905531, 0, 0.693147180559945, 2.99573227355399, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.07944154167984, 1.38629436111989, 
1.6094379124341, 0, 1.09861228866811, 0, 2.07944154167984, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 0, 1.38629436111989, 2.63905732961526, 2.07944154167984, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.6094379124341, 0, 2.30258509299405, 1.38629436111989, 0, 0, 0, 1.94591014905531, 1.6094379124341, 2.56494935746154, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.07944154167984, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 2.30258509299405, 1.94591014905531, 0, 2.484906649788, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.07944154167984, 2.19722457733622, 1.09861228866811, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.79175946922805, 3.09104245335832, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.94591014905531, 1.6094379124341, 2.63905732961526, 1.38629436111989, 2.484906649788, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.56494935746154, 1.94591014905531, 2.94443897916644, 2.39789527279837, 1.6094379124341, 1.38629436111989, 0.693147180559945, 2.83321334405622, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.79175946922805, 2.30258509299405, 1.79175946922805, 1.94591014905531, 2.07944154167984, 2.30258509299405, 1.94591014905531, 2.19722457733622, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 2.07944154167984, 2.19722457733622, 2.39789527279837, 0.693147180559945, 2.70805020110221, 1.94591014905531, 2.484906649788, 2.30258509299405, 1.94591014905531, 2.30258509299405, 1.94591014905531, 2.484906649788, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.94591014905531, 
1.09861228866811, 0, 1.38629436111989, 2.83321334405622, 2.19722457733622, 2.07944154167984, 1.6094379124341, 2.07944154167984, 2.19722457733622, 1.94591014905531, 2.19722457733622, 2.89037175789616, 2.07944154167984, 2.19722457733622, 2.77258872223978, 2.484906649788, 1.6094379124341, 2.07944154167984, 2.56494935746154, 1.38629436111989, 2.30258509299405, 1.94591014905531, 2.56494935746154, 1.6094379124341, 1.38629436111989, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.19722457733622, 2.19722457733622, 1.09861228866811, 2.19722457733622, 2.19722457733622, 1.09861228866811, 2.19722457733622, 2.39789527279837, 1.6094379124341, 2.07944154167984, 2.77258872223978, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.38629436111989, 2.70805020110221, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0.693147180559945, 2.70805020110221, 1.94591014905531, 1.79175946922805, 2.19722457733622, 2.83321334405622, 1.09861228866811, 1.79175946922805, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.56494935746154, 2.39789527279837, 2.19722457733622, 1.6094379124341, 2.30258509299405, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.6094379124341, 2.07944154167984, 1.79175946922805, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.6094379124341, 2.484906649788, 1.79175946922805, 0, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.30258509299405, 1.94591014905531, 0.693147180559945, 0.693147180559945, 2.39789527279837, 2.07944154167984, 1.79175946922805, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 2.19722457733622, 2.30258509299405, 1.79175946922805, 1.79175946922805, 1.6094379124341, 3.17805383034795, 1.94591014905531, 0.693147180559945, 1.94591014905531, 2.19722457733622, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.19722457733622, 1.94591014905531, 2.07944154167984, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.38629436111989, 
1.94591014905531, 1.94591014905531, 2.07944154167984, 1.09861228866811, 2.07944154167984, 2.77258872223978, 2.30258509299405, 2.19722457733622, 0.693147180559945, 2.30258509299405, 3.2188758248682, 1.6094379124341, 1.94591014905531, 1.94591014905531, 1.79175946922805, 2.63905732961526, 2.83321334405622, 0, 1.09861228866811, 2.39789527279837, 1.79175946922805, 2.30258509299405, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.94591014905531, 2.70805020110221, 2.07944154167984, 1.94591014905531, 2.70805020110221, 1.94591014905531, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.79175946922805, 2.19722457733622, 2.70805020110221, 0, 1.6094379124341, 2.39789527279837, 1.94591014905531, 1.38629436111989, 0.693147180559945, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.38629436111989, 2.19722457733622, 1.94591014905531, 1.6094379124341, 1.6094379124341, 1.6094379124341, 2.77258872223978, 2.19722457733622, 2.30258509299405, 1.79175946922805, 1.79175946922805, 1.79175946922805, 2.30258509299405, 1.6094379124341, 1.79175946922805, 2.19722457733622, 0, 1.09861228866811, 0, 2.63905732961526, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.484906649788, 1.79175946922805, 0.693147180559945, 1.38629436111989, 2.484906649788, 0, 1.94591014905531, 1.6094379124341, 1.09861228866811, 2.19722457733622, 0, 1.79175946922805, 2.56494935746154, 0.693147180559945, 1.79175946922805, 1.79175946922805, 2.19722457733622, 1.6094379124341, 1.79175946922805, 2.77258872223978, 0, 2.07944154167984, 2.484906649788, 1.09861228866811, 1.94591014905531, 1.94591014905531, 1.79175946922805, 1.94591014905531, 1.09861228866811, 2.19722457733622, 2.484906649788, 0.693147180559945, 1.09861228866811, 2.77258872223978, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.09861228866811, 1.09861228866811, 2.39789527279837, 2.63905732961526, 1.38629436111989, 
2.19722457733622, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.79175946922805, 2.89037175789616, 1.6094379124341, 3.04452243772342, 2.63905732961526, 1.94591014905531, 2.30258509299405, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.70805020110221, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.39789527279837, 1.09861228866811, 1.6094379124341, 1.94591014905531, 0, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 2.484906649788, 1.79175946922805, 2.70805020110221, 3.04452243772342, 2.484906649788, 3.04452243772342, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 0.693147180559945, 2.19722457733622, 2.07944154167984, 0.693147180559945, 1.79175946922805, 2.30258509299405, 1.79175946922805, 0, 1.79175946922805, 2.99573227355399, 1.38629436111989, 1.38629436111989, 1.94591014905531, 2.07944154167984, 2.63905732961526, 2.30258509299405, 2.19722457733622, 1.79175946922805, 2.19722457733622, 0.693147180559945, 2.19722457733622, 1.09861228866811, 1.79175946922805, 1.09861228866811, 2.07944154167984, 2.70805020110221, 1.38629436111989, 0, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 2.56494935746154, 1.6094379124341, 0, 1.94591014905531, 1.6094379124341, 0, 2.19722457733622, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.63905732961526, 0, 1.94591014905531, 1.79175946922805, 2.56494935746154, 1.94591014905531, 2.19722457733622, 1.79175946922805, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.79175946922805, 2.07944154167984, 1.38629436111989, 1.38629436111989, 2.07944154167984, 3.17805383034795, 1.94591014905531, 1.6094379124341, 1.94591014905531, 1.79175946922805, 0.693147180559945, 2.484906649788, 1.38629436111989, 0.693147180559945, 0, 
0.693147180559945, 0, 2.39789527279837, 0, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.94591014905531, 2.30258509299405, 2.19722457733622, 0, 1.79175946922805, 0, 2.07944154167984, 2.39789527279837, 1.09861228866811, 1.79175946922805, 1.6094379124341, 1.38629436111989, 2.30258509299405, 2.19722457733622, 0, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.6094379124341, 2.19722457733622, 2.39789527279837, 1.6094379124341, 2.39789527279837, 2.484906649788, 2.484906649788, 2.39789527279837, 1.79175946922805, 2.30258509299405, 2.39789527279837, 2.19722457733622, 1.6094379124341, 2.39789527279837, 2.39789527279837, 2.30258509299405, 1.09861228866811, 2.07944154167984, 0.693147180559945, 2.89037175789616, 1.94591014905531, 1.6094379124341, 1.38629436111989, 2.39789527279837, 1.09861228866811, 2.07944154167984, 2.63905732961526, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.56494935746154, 2.56494935746154, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 2.56494935746154, 1.09861228866811, 2.30258509299405, 2.30258509299405, 2.484906649788, 2.70805020110221, 1.6094379124341, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 2.30258509299405, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.39789527279837, 0, 2.70805020110221, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.38629436111989, 2.30258509299405, 1.6094379124341, 1.38629436111989, 1.6094379124341, 2.39789527279837, 1.79175946922805, 1.38629436111989, 0, 1.38629436111989, 1.94591014905531, 2.39789527279837, 1.6094379124341, 1.79175946922805, 1.09861228866811, 
0.693147180559945, 0.693147180559945, 2.07944154167984, 2.19722457733622, 2.63905732961526, 2.07944154167984, 1.94591014905531, 0.693147180559945, 2.07944154167984, 2.07944154167984, 2.484906649788, 1.79175946922805, 2.07944154167984, 1.94591014905531, 2.19722457733622, 1.09861228866811, 0, 1.6094379124341, 1.6094379124341, 0, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.6094379124341, 2.07944154167984, 2.19722457733622, 0.693147180559945, 1.09861228866811, 2.63905732961526, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.38629436111989, 2.94443897916644, 2.07944154167984, 1.09861228866811, 2.94443897916644, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.6094379124341, 2.30258509299405, 1.09861228866811, 0, 1.94591014905531, 2.30258509299405, 1.6094379124341, 2.30258509299405, 1.94591014905531, 1.09861228866811, 2.83321334405622, 1.09861228866811, 0, 2.39789527279837, 2.30258509299405, 1.6094379124341, 1.79175946922805, 1.94591014905531, 2.19722457733622, 2.89037175789616, 1.94591014905531, 2.19722457733622, 2.19722457733622, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.30258509299405, 1.6094379124341, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.79175946922805, 2.56494935746154, 2.30258509299405, 1.6094379124341, 0.693147180559945, 0, 2.70805020110221, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.30258509299405, 2.07944154167984, 2.30258509299405, 0, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 2.19722457733622, 2.63905732961526, 1.94591014905531, 2.30258509299405, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 2.39789527279837, 1.6094379124341, 1.94591014905531, 1.09861228866811, 2.30258509299405, 1.38629436111989, 2.07944154167984, 1.79175946922805, 
1.6094379124341, 2.77258872223978, 2.19722457733622, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.07944154167984, 0.693147180559945, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.63905732961526, 1.38629436111989, 2.30258509299405, 2.07944154167984, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 2.07944154167984, 1.79175946922805, 2.30258509299405, 2.56494935746154, 1.79175946922805, 0, 0, 0.693147180559945, 2.19722457733622, 2.19722457733622, 2.484906649788, 0.693147180559945, 1.94591014905531, 1.38629436111989, 2.56494935746154, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 0, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 2.30258509299405, 2.07944154167984, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.38629436111989, 1.94591014905531, 1.6094379124341, 1.79175946922805, 2.70805020110221, 0.693147180559945, 2.99573227355399, 2.30258509299405, 1.94591014905531, 2.56494935746154, 1.79175946922805, 1.38629436111989, 1.6094379124341, 2.56494935746154, 2.39789527279837, 2.56494935746154, 1.6094379124341, 2.484906649788, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.79175946922805, 2.56494935746154, 1.6094379124341, 1.6094379124341, 2.19722457733622, 2.484906649788, 1.79175946922805, 1.79175946922805, 1.09861228866811, 2.19722457733622, 0.693147180559945, 2.484906649788, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.83321334405622, 1.6094379124341, 1.79175946922805, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.56494935746154, 1.09861228866811, 2.07944154167984, 2.484906649788, 1.79175946922805, 1.79175946922805, 1.94591014905531, 2.07944154167984, 1.94591014905531, 2.19722457733622, 1.6094379124341, 2.07944154167984, 1.6094379124341, 2.484906649788, 1.09861228866811, 
1.79175946922805, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0, 1.94591014905531, 2.19722457733622, 0.693147180559945, 2.484906649788, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.79175946922805, 2.70805020110221, 0.693147180559945, 2.19722457733622, 2.07944154167984, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.6094379124341, 2.07944154167984, 2.30258509299405, 1.09861228866811, 1.38629436111989, 2.484906649788, 0, 2.63905732961526, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.38629436111989, 2.19722457733622, 3.17805383034795, 0.693147180559945, 2.484906649788, 1.6094379124341, 0.693147180559945, 0.693147180559945, 2.19722457733622, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.484906649788, 2.56494935746154, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.38629436111989, 2.484906649788, 1.6094379124341, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.484906649788, 1.38629436111989, 1.38629436111989, 2.30258509299405, 1.38629436111989, 2.39789527279837, 2.39789527279837, 2.19722457733622, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.94591014905531, 2.484906649788, 1.94591014905531, 3.04452243772342, 1.94591014905531, 2.07944154167984, 2.484906649788, 1.09861228866811, 1.6094379124341, 2.19722457733622, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.6094379124341, 2.70805020110221, 1.6094379124341, 2.94443897916644, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.39789527279837, 1.38629436111989, 0, 2.07944154167984, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.94591014905531, 2.39789527279837, 2.63905732961526, 0.693147180559945, 1.38629436111989, 
2.70805020110221, 2.484906649788, 1.94591014905531, 1.79175946922805, 0, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.79175946922805, 0.693147180559945, 2.19722457733622, 0, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.30258509299405, 1.38629436111989, 2.07944154167984, 0, 1.38629436111989, 1.38629436111989, 2.39789527279837, 2.30258509299405, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.6094379124341, 2.56494935746154, 1.79175946922805, 1.09861228866811, 2.484906649788, 2.07944154167984, 1.6094379124341, 1.94591014905531, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 2.30258509299405, 2.39789527279837, 2.19722457733622, 2.19722457733622, 1.94591014905531, 0.693147180559945, 2.07944154167984, 2.19722457733622, 2.77258872223978, 2.56494935746154, 2.56494935746154, 1.6094379124341, 2.19722457733622, 1.09861228866811, 2.77258872223978, 1.79175946922805, 2.30258509299405, 2.19722457733622, 2.07944154167984, 1.09861228866811, 1.6094379124341, 0, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.6094379124341, 2.56494935746154, 1.6094379124341, 1.6094379124341, 2.07944154167984, 2.94443897916644, 1.79175946922805, 2.39789527279837, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.38629436111989, 2.39789527279837, 1.38629436111989, 1.6094379124341, 2.30258509299405, 2.07944154167984, 2.484906649788, 1.94591014905531, 1.6094379124341, 2.30258509299405, 1.38629436111989, 1.94591014905531, 2.19722457733622, 2.89037175789616, 0.693147180559945, 1.6094379124341, 2.30258509299405, 2.39789527279837, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.39789527279837, 1.94591014905531, 1.09861228866811, 2.56494935746154, 1.79175946922805, 2.89037175789616, 1.38629436111989, 2.30258509299405, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.19722457733622, 
0.693147180559945, 1.6094379124341, 3.13549421592915, 2.30258509299405, 2.70805020110221, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 2.99573227355399, 1.6094379124341, 2.94443897916644, 2.39789527279837, 2.89037175789616, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.38629436111989, 1.38629436111989, 2.07944154167984, 1.38629436111989, 2.63905732961526, 1.94591014905531, 1.09861228866811, 3.09104245335832, 2.19722457733622, 2.39789527279837, 2.83321334405622, 1.38629436111989, 1.94591014905531, 1.6094379124341, 2.77258872223978, 1.09861228866811, 2.63905732961526, 1.6094379124341, 2.484906649788, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.38629436111989, 2.39789527279837, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 2.30258509299405, 1.38629436111989, 1.79175946922805, 1.94591014905531, 2.19722457733622, 1.79175946922805, 1.94591014905531, 2.30258509299405, 1.94591014905531, 2.19722457733622, 1.94591014905531, 1.38629436111989, 2.56494935746154, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.77258872223978, 1.6094379124341, 1.79175946922805, 1.94591014905531, 2.19722457733622, 2.19722457733622, 1.94591014905531, 2.77258872223978, 1.79175946922805, 2.07944154167984, 2.19722457733622, 1.6094379124341, 1.38629436111989, 1.38629436111989, 0, 1.38629436111989, 2.39789527279837, 2.39789527279837, 2.63905732961526, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0, 2.39789527279837, 1.79175946922805, 0.693147180559945, 2.56494935746154, 2.19722457733622, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0, 1.79175946922805, 1.6094379124341, 1.94591014905531, 2.30258509299405, 1.79175946922805, 2.07944154167984, 
1.09861228866811, 2.39789527279837, 2.07944154167984, 2.19722457733622, 1.94591014905531, 0, 1.38629436111989, 2.484906649788, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.38629436111989, 3.04452243772342, 1.6094379124341, 2.77258872223978, 1.38629436111989, 1.09861228866811, 2.07944154167984, 2.07944154167984, 2.07944154167984, 1.94591014905531, 2.07944154167984, 1.38629436111989, 2.19722457733622, 2.94443897916644, 1.79175946922805, 1.6094379124341, 2.07944154167984, 2.39789527279837, 2.07944154167984, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.38629436111989, 3.04452243772342, 1.38629436111989, 1.94591014905531, 2.63905732961526, 2.484906649788, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.39789527279837, 2.19722457733622, 1.6094379124341, 1.09861228866811, 2.39789527279837, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.63905732961526, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.6094379124341, 1.79175946922805, 2.30258509299405, 2.30258509299405, 1.6094379124341, 2.30258509299405, 2.19722457733622, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.30258509299405, 2.07944154167984, 1.38629436111989, 0, 2.07944154167984, 2.39789527279837, 2.56494935746154, 1.38629436111989, 1.6094379124341, 0, 1.94591014905531, 2.19722457733622, 2.07944154167984, 2.484906649788, 1.6094379124341, 3.29583686600433, 1.6094379124341, 1.79175946922805, 0, 0, 2.56494935746154, 2.484906649788, 2.39789527279837, 1.09861228866811, 2.63905732961526, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 2.39789527279837, 1.79175946922805, 1.79175946922805, 1.09861228866811, 1.79175946922805, 2.83321334405622, 1.94591014905531, 1.79175946922805, 2.99573227355399, 2.70805020110221, 2.39789527279837, 2.30258509299405, 0, 1.38629436111989, 2.484906649788, 
0.693147180559945, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.6094379124341, 1.09861228866811, 0, 1.79175946922805, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.38629436111989, 0, 2.30258509299405, 1.09861228866811, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.6094379124341, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.56494935746154, 1.94591014905531, 0, 0.693147180559945, 2.63905732961526, 0.693147180559945, 1.09861228866811, 2.30258509299405, 2.30258509299405, 1.09861228866811, 2.30258509299405, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.94591014905531, 1.79175946922805, 2.63905732961526, 1.79175946922805, 2.19722457733622, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.09861228866811, 2.07944154167984, 0, 1.09861228866811, 2.39789527279837, 1.6094379124341, 2.07944154167984, 2.30258509299405, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.19722457733622, 2.484906649788, 1.6094379124341, 1.94591014905531, 1.94591014905531, 1.6094379124341, 1.79175946922805, 3.2188758248682, 1.94591014905531, 2.30258509299405, 2.19722457733622, 2.56494935746154, 1.94591014905531, 1.94591014905531, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.79175946922805, 2.07944154167984, 1.6094379124341, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.09861228866811, 2.63905732961526, 1.6094379124341, 2.07944154167984, 2.77258872223978, 2.07944154167984, 1.6094379124341, 1.79175946922805, 0.693147180559945, 0.693147180559945, 2.39789527279837, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.30258509299405, 2.484906649788, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 
2.07944154167984, 1.38629436111989, 0, 2.30258509299405, 1.94591014905531, 2.07944154167984, 2.19722457733622, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.63905732961526, 2.39789527279837, 0.693147180559945, 2.56494935746154, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.6094379124341, 2.484906649788, 2.30258509299405, 2.07944154167984, 2.39789527279837, 1.6094379124341, 1.94591014905531, 2.30258509299405, 2.07944154167984, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.19722457733622, 2.484906649788, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.484906649788, 1.6094379124341, 1.09861228866811, 1.94591014905531, 0, 2.07944154167984, 2.07944154167984, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.94591014905531, 1.09861228866811, 2.99573227355399, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 2.19722457733622, 2.56494935746154, 1.38629436111989, 2.484906649788, 1.79175946922805, 1.09861228866811, 2.39789527279837, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.38629436111989, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.79175946922805, 2.99573227355399, 1.09861228866811, 2.19722457733622, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.39789527279837, 1.6094379124341, 0.693147180559945, 1.38629436111989, 2.94443897916644, 
1.6094379124341, 1.94591014905531, 2.77258872223978, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.79175946922805, 0, 1.6094379124341, 2.07944154167984, 1.38629436111989, 0, 0, 1.94591014905531, 0, 1.79175946922805, 2.63905732961526, 2.56494935746154, 2.89037175789616, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.07944154167984, 2.07944154167984, 1.38629436111989, 2.30258509299405, 2.30258509299405, 2.19722457733622, 2.99573227355399, 1.6094379124341, 2.63905732961526, 1.6094379124341, 2.30258509299405, 0.693147180559945, 1.09861228866811, 2.77258872223978, 2.30258509299405, 2.39789527279837, 2.63905732961526, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 2.39789527279837, 2.484906649788, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.09861228866811, 2.56494935746154, 1.79175946922805, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.79175946922805, 1.79175946922805, 1.94591014905531, 2.39789527279837, 1.6094379124341, 1.94591014905531, 1.38629436111989, 2.07944154167984, 2.07944154167984, 1.94591014905531, 2.77258872223978, 2.484906649788, 2.89037175789616, 2.07944154167984, 2.39789527279837, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.38629436111989, 2.19722457733622, 1.94591014905531, 1.38629436111989, 2.30258509299405, 2.07944154167984, 2.83321334405622, 2.83321334405622, 2.19722457733622, 1.94591014905531, 1.09861228866811, 1.94591014905531, 1.6094379124341, 2.484906649788, 1.6094379124341, 2.484906649788, 1.79175946922805, 2.30258509299405, 2.70805020110221, 1.79175946922805, 2.07944154167984, 1.94591014905531, 1.94591014905531, 2.07944154167984, 1.6094379124341, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.56494935746154, 
2.07944154167984, 1.94591014905531, 2.19722457733622, 2.19722457733622, 0.693147180559945, 2.19722457733622, 2.07944154167984, 2.94443897916644, 1.79175946922805, 0, 1.94591014905531, 2.70805020110221, 1.79175946922805, 2.19722457733622, 2.484906649788, 2.63905732961526, 1.6094379124341, 2.39789527279837, 2.19722457733622, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.79175946922805, 2.39789527279837, 0, 1.09861228866811, 1.79175946922805, 2.484906649788, 2.39789527279837, 2.07944154167984, 1.09861228866811, 2.07944154167984, 0.693147180559945, 2.07944154167984, 1.94591014905531, 1.6094379124341, 0, 1.38629436111989, 1.38629436111989, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.79175946922805, 1.09861228866811, 2.30258509299405, 2.07944154167984, 1.38629436111989, 1.94591014905531, 2.99573227355399, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.19722457733622, 1.09861228866811, 2.30258509299405, 1.38629436111989, 2.39789527279837, 2.19722457733622, 1.79175946922805, 2.30258509299405, 1.09861228866811, 2.30258509299405, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.94591014905531, 2.70805020110221, 2.30258509299405, 1.38629436111989, 1.09861228866811, 1.79175946922805, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.09861228866811, 2.99573227355399, 2.07944154167984, 1.94591014905531, 0, 1.79175946922805, 1.6094379124341, 2.70805020110221, 0.693147180559945, 1.09861228866811, 2.89037175789616, 2.39789527279837, 1.94591014905531, 2.07944154167984, 2.30258509299405, 0, 2.19722457733622, 2.30258509299405, 1.09861228866811, 1.94591014905531, 2.30258509299405, 1.79175946922805, 3.09104245335832, 1.79175946922805, 2.19722457733622, 1.94591014905531, 2.30258509299405, 2.07944154167984, 2.07944154167984, 0, 2.63905732961526, 2.07944154167984, 2.30258509299405, 1.79175946922805, 2.19722457733622, 1.09861228866811, 0, 2.63905732961526, 2.39789527279837, 
0.693147180559945, 0, 2.07944154167984, 1.6094379124341, 1.6094379124341, 2.56494935746154, 1.79175946922805, 2.30258509299405, 2.07944154167984, 1.79175946922805, 0, 1.79175946922805, 2.63905732961526, 2.89037175789616, 1.6094379124341, 1.38629436111989, 1.09861228866811, 2.39789527279837, 2.484906649788, 1.38629436111989, 1.38629436111989, 2.39789527279837, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0.693147180559945, 1.6094379124341, 1.09861228866811, 2.56494935746154, 1.38629436111989, 1.6094379124341, 2.77258872223978, 1.94591014905531, 2.39789527279837, 1.94591014905531, 1.6094379124341, 2.484906649788, 2.39789527279837, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.09861228866811, 2.39789527279837, 2.77258872223978, 2.89037175789616, 0, 1.09861228866811, 1.79175946922805, 2.19722457733622, 2.07944154167984, 2.30258509299405, 1.09861228866811, 2.07944154167984, 2.56494935746154, 0.693147180559945, 2.19722457733622, 1.6094379124341, 2.83321334405622, 2.19722457733622, 1.94591014905531, 2.07944154167984, 1.09861228866811, 2.30258509299405, 2.39789527279837, 0.693147180559945, 2.07944154167984, 1.09861228866811, 2.89037175789616, 1.79175946922805, 2.77258872223978, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.09861228866811, 2.39789527279837, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.94591014905531, 2.63905732961526, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.79175946922805, 1.94591014905531, 2.19722457733622, 1.09861228866811, 2.484906649788, 1.79175946922805, 1.38629436111989, 1.79175946922805, 1.94591014905531, 2.19722457733622, 2.70805020110221, 
2.484906649788, 0.693147180559945, 2.19722457733622, 1.94591014905531, 1.09861228866811, 2.07944154167984, 1.38629436111989, 1.94591014905531, 2.39789527279837, 0.693147180559945, 0, 0, 2.30258509299405, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.09861228866811, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.38629436111989, 1.94591014905531, 1.38629436111989, 2.07944154167984, 2.56494935746154, 2.30258509299405, 1.79175946922805, 2.77258872223978, 2.07944154167984, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.94591014905531, 2.39789527279837, 2.56494935746154, 1.09861228866811, 1.6094379124341, 2.07944154167984, 2.484906649788, 1.94591014905531, 1.38629436111989, 2.30258509299405, 1.94591014905531, 0.693147180559945, 1.6094379124341, 2.99573227355399, 2.07944154167984, 2.39789527279837, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.79175946922805, 1.94591014905531, 2.484906649788, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.79175946922805, 2.07944154167984, 2.07944154167984, 2.07944154167984, 1.79175946922805, 1.38629436111989, 2.99573227355399, 2.77258872223978, 2.83321334405622, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.30258509299405, 1.79175946922805, 2.30258509299405, 2.39789527279837, 1.09861228866811, 2.30258509299405, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 1.6094379124341, 2.39789527279837, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.6094379124341, 
2.77258872223978, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0.693147180559945, 2.19722457733622, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 2.19722457733622, 1.38629436111989, 1.09861228866811, 0, 0, 2.30258509299405, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 2.30258509299405, 0.693147180559945, 0, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.63905732961526, 1.38629436111989, 1.79175946922805, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 2.56494935746154, 0, 0, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 2.39789527279837, 2.19722457733622, 0.693147180559945, 1.94591014905531, 1.38629436111989, 1.09861228866811, 2.30258509299405, 2.07944154167984, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.94591014905531, 0.693147180559945, 2.99573227355399, 2.70805020110221, 1.79175946922805, 0.693147180559945, 0.693147180559945, 2.484906649788, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 0, 0, 1.6094379124341, 2.30258509299405, 1.79175946922805, 1.79175946922805, 0.693147180559945, 
1.09861228866811, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 0, 2.07944154167984, 1.09861228866811, 1.09861228866811, 2.39789527279837, 0.693147180559945, 0, 1.6094379124341, 1.6094379124341, 1.94591014905531, 1.09861228866811, 0, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.38629436111989, 2.30258509299405, 2.39789527279837, 1.6094379124341, 0, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0, 0, 2.63905732961526, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0, 1.38629436111989, 0, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.79175946922805, 2.484906649788, 1.09861228866811, 2.07944154167984, 1.38629436111989, 0, 1.38629436111989, 1.94591014905531, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 2.07944154167984, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 3.17805383034795, 2.19722457733622, 2.56494935746154, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0, 2.19722457733622, 1.09861228866811, 1.79175946922805, 2.19722457733622, 1.09861228866811, 0, 1.38629436111989, 1.94591014905531, 1.38629436111989, 0, 2.07944154167984, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.30258509299405, 1.79175946922805, 0, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 0, 
1.6094379124341, 1.94591014905531, 2.07944154167984, 0.693147180559945, 1.79175946922805, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.6094379124341, 1.09861228866811, 2.07944154167984, 1.6094379124341, 0, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.09861228866811, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.19722457733622, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 2.19722457733622, 0, 1.94591014905531, 0, 2.77258872223978, 1.38629436111989, 1.6094379124341, 0, 0, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.79175946922805, 2.39789527279837, 1.79175946922805, 2.30258509299405, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 2.07944154167984, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0, 1.09861228866811, 2.19722457733622, 0, 1.94591014905531, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.09861228866811, 2.39789527279837, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.79175946922805, 
1.38629436111989, 0.693147180559945, 2.30258509299405, 1.79175946922805, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.6094379124341, 2.07944154167984, 0.693147180559945, 1.38629436111989, 2.39789527279837, 1.94591014905531, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.94591014905531, 2.63905732961526, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.79175946922805, 2.30258509299405, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 1.38629436111989, 2.19722457733622, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.30258509299405, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.94591014905531, 0.693147180559945, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 2.63905732961526, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0, 1.09861228866811, 1.79175946922805, 2.484906649788, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 0.693147180559945, 0, 2.07944154167984, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 2.39789527279837, 0, 1.79175946922805, 1.38629436111989, 2.07944154167984, 2.07944154167984, 1.09861228866811, 
1.79175946922805, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0, 0, 2.19722457733622, 1.79175946922805, 1.38629436111989, 1.38629436111989, 0, 0, 1.79175946922805, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 2.39789527279837, 1.94591014905531, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 2.07944154167984, 0, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.56494935746154, 0.693147180559945, 1.38629436111989, 0, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 2.94443897916644, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.6094379124341, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.94591014905531, 0, 1.79175946922805, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 1.38629436111989, 2.19722457733622, 0, 0, 2.56494935746154, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 2.19722457733622, 1.38629436111989, 
1.6094379124341, 0.693147180559945, 1.09861228866811, 1.94591014905531, 1.09861228866811, 2.56494935746154, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 2.70805020110221, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0, 0, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.6094379124341, 2.07944154167984, 0, 0.693147180559945, 1.79175946922805, 0, 1.79175946922805, 1.09861228866811, 0, 0, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0, 1.6094379124341, 2.19722457733622, 1.79175946922805, 0, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.79175946922805, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0, 0, 0, 0, 2.07944154167984, 0.693147180559945, 0, 1.79175946922805, 2.30258509299405, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.38629436111989, 0, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.79175946922805, 0, 0, 0, 0, 
0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.6094379124341, 0, 2.19722457733622, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 2.07944154167984, 0, 1.6094379124341, 0, 1.6094379124341, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.09861228866811, 2.19722457733622, 0, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.94591014905531, 1.6094379124341, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 2.07944154167984, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 2.07944154167984, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0, 0, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 0, 0, 1.94591014905531, 0, 1.09861228866811, 1.09861228866811, 2.39789527279837, 0.693147180559945, 2.63905732961526, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.6094379124341, 1.79175946922805, 
1.38629436111989, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.19722457733622, 1.38629436111989, 1.94591014905531, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.38629436111989, 0, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.94591014905531, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 1.94591014905531, 0, 1.94591014905531, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.94591014905531, 2.39789527279837, 1.6094379124341, 2.19722457733622, 1.79175946922805, 2.39789527279837, 2.07944154167984, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.07944154167984, 2.07944154167984, 1.09861228866811, 0, 2.30258509299405, 0.693147180559945, 0.693147180559945, 2.19722457733622, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 1.94591014905531, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 2.39789527279837, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0, 2.484906649788, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.09861228866811, 2.30258509299405, 0.693147180559945, 0.693147180559945, 0, 2.19722457733622, 1.38629436111989, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.94591014905531, 2.19722457733622, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 
1.09861228866811, 0, 0, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 1.38629436111989, 1.79175946922805, 0, 2.484906649788, 1.6094379124341, 2.07944154167984, 0, 0, 2.19722457733622, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.94591014905531, 2.19722457733622, 1.94591014905531, 2.07944154167984, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0.693147180559945, 2.19722457733622, 1.94591014905531, 1.94591014905531, 0, 1.79175946922805, 0, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0.693147180559945, 2.07944154167984, 2.07944154167984, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.94591014905531, 1.6094379124341, 0, 1.09861228866811, 2.39789527279837, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0, 1.38629436111989, 1.09861228866811, 1.94591014905531, 0, 0, 0.693147180559945, 1.38629436111989, 1.94591014905531, 0, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 2.07944154167984, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 2.484906649788, 1.09861228866811, 0, 2.484906649788, 2.30258509299405, 0, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.6094379124341, 2.56494935746154, 1.94591014905531, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 0, 1.94591014905531, 
2.07944154167984, 2.07944154167984, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.38629436111989, 2.39789527279837, 0, 2.39789527279837, 1.38629436111989, 0, 2.19722457733622, 1.09861228866811, 0, 1.6094379124341, 2.30258509299405, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.19722457733622, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.6094379124341, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 2.39789527279837, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.07944154167984, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.09861228866811, 1.79175946922805, 1.94591014905531, 0, 0, 1.94591014905531, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 0, 1.6094379124341, 2.77258872223978, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 2.30258509299405, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.79175946922805, 0.693147180559945, 1.79175946922805, 0, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 0, 0.693147180559945, 0, 1.79175946922805, 1.94591014905531, 2.484906649788, 0.693147180559945, 1.94591014905531, 
0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 0, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0.693147180559945, 2.19722457733622, 1.09861228866811, 2.19722457733622, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 0, 0, 1.38629436111989, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.07944154167984, 2.30258509299405, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.94591014905531, 1.38629436111989, 2.19722457733622, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.94591014905531, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 2.19722457733622, 2.30258509299405, 0, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.09861228866811, 0.693147180559945, 2.07944154167984, 1.6094379124341, 1.6094379124341, 0, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0, 1.94591014905531, 0.693147180559945, 
1.09861228866811, 1.09861228866811, 1.38629436111989, 1.09861228866811, 2.07944154167984, 2.07944154167984, 1.09861228866811, 0.693147180559945, 2.70805020110221, 0.693147180559945, 2.30258509299405, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.6094379124341, 0, 2.39789527279837, 1.09861228866811, 1.94591014905531, 1.09861228866811, 2.70805020110221, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.484906649788, 1.09861228866811, 0, 0.693147180559945, 2.19722457733622, 1.38629436111989, 1.94591014905531, 0, 0, 1.6094379124341, 1.6094379124341, 0, 0, 1.09861228866811, 0.693147180559945, 0, 2.30258509299405, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0, 2.07944154167984, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.79175946922805, 2.77258872223978, 1.38629436111989, 1.38629436111989, 1.6094379124341, 2.19722457733622, 1.79175946922805, 1.6094379124341, 0.693147180559945, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.38629436111989, 0, 0, 2.30258509299405, 0, 1.79175946922805, 1.6094379124341, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0, 0, 0, 1.38629436111989, 2.07944154167984, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 2.19722457733622, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.07944154167984, 
2.83321334405622, 1.6094379124341, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0, 1.79175946922805, 0.693147180559945, 0, 1.38629436111989, 1.94591014905531, 1.6094379124341, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 0, 2.30258509299405, 1.6094379124341, 1.38629436111989, 1.09861228866811, 2.19722457733622, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.07944154167984, 1.79175946922805, 2.19722457733622, 1.94591014905531, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.79175946922805, 2.39789527279837, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0, 0, 1.09861228866811, 0.693147180559945, 1.94591014905531, 2.63905732961526, 1.79175946922805, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.39789527279837, 0, 2.30258509299405, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.94591014905531, 2.07944154167984, 0, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.09861228866811, 
0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.30258509299405, 1.38629436111989, 0, 2.07944154167984, 1.38629436111989, 2.07944154167984, 1.6094379124341, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 1.38629436111989, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.38629436111989, 2.07944154167984, 0, 0, 2.07944154167984, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 2.07944154167984, 1.79175946922805, 1.38629436111989, 2.30258509299405, 0.693147180559945, 1.6094379124341, 2.07944154167984, 1.09861228866811, 2.07944154167984, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 2.07944154167984, 2.484906649788, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0, 1.79175946922805, 0, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.07944154167984, 1.79175946922805, 1.09861228866811, 1.09861228866811, 2.63905732961526, 1.6094379124341, 2.39789527279837, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0, 1.6094379124341, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 2.19722457733622, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0, 0, 1.6094379124341, 
1.94591014905531, 0, 0, 0, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.38629436111989, 2.07944154167984, 0, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.63905732961526, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.09861228866811, 2.484906649788, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.94591014905531, 0.693147180559945, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.38629436111989, 3.58351893845611, 1.09861228866811, 1.09861228866811, 1.79175946922805, 0, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0, 0, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 2.39789527279837, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0, 2.484906649788, 0.693147180559945, 0, 1.38629436111989, 1.94591014905531, 0, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.94591014905531, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 0, 0, 0, 1.38629436111989, 1.38629436111989, 0, 0, 0.693147180559945, 
2.07944154167984, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.94591014905531, 2.07944154167984, 0.693147180559945, 0, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0, 0, 1.6094379124341, 1.6094379124341, 0, 0.693147180559945, 0, 2.484906649788, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0, 1.79175946922805, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 1.79175946922805, 0, 1.79175946922805, 1.79175946922805, 0, 1.94591014905531, 0, 1.38629436111989, 1.79175946922805, 0, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 0, 0.693147180559945, 0, 0, 1.79175946922805, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 0, 0.693147180559945, 1.6094379124341, 1.79175946922805, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0, 0, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0, 0.693147180559945, 0, 
0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0, 0, 0, 2.39789527279837, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 1.6094379124341, 1.79175946922805, 2.07944154167984, 2.70805020110221, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 0, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0, 0.693147180559945, 2.94443897916644, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 0, 0, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0.693147180559945, 0, 0, 0, 0.693147180559945, 1.79175946922805, 0, 0, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 0, 1.94591014905531, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 0, 
0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 0, 0, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 2.30258509299405, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.19722457733622, 0, 2.19722457733622, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 0, 0.693147180559945, 0, 2.19722457733622, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 2.99573227355399, 0, 0, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.6094379124341, 2.07944154167984, 0, 0.693147180559945, 0, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 0, 1.09861228866811, 0, 1.6094379124341, 1.79175946922805, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 1.38629436111989, 1.38629436111989, 0, 1.6094379124341, 0, 1.38629436111989, 0.693147180559945, 0, 1.79175946922805, 0, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 
0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 0, 0, 1.09861228866811, 1.6094379124341, 0, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 0, 0, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 2.30258509299405, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0, 1.6094379124341, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0, 1.09861228866811, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.79175946922805, 0, 0, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 0, 0, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 2.77258872223978, 1.6094379124341, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0, 0, 0, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 1.6094379124341, 0, 0, 0.693147180559945, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0, 0, 
1.09861228866811, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 0, 0, 2.07944154167984, 0, 0, 1.6094379124341, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0, 0, 2.07944154167984, 0, 0, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 1.79175946922805, 0, 2.30258509299405, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 1.09861228866811, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0, 1.6094379124341, 0, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 1.6094379124341, 0, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 2.19722457733622, 1.79175946922805, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 2.07944154167984, 1.09861228866811, 1.38629436111989, 0, 0, 0, 0, 1.09861228866811, 0, 2.07944154167984, 1.09861228866811, 0, 0, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0, 0, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 
1.6094379124341, 0.693147180559945, 2.30258509299405, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0, 0, 1.09861228866811, 1.6094379124341, 0, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.94591014905531, 0, 1.94591014905531, 0.693147180559945, 0, 2.39789527279837, 2.77258872223978, 0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 0, 2.19722457733622, 1.09861228866811, 2.19722457733622, 0, 0, 1.6094379124341, 2.19722457733622, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 2.30258509299405, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 0, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0, 0, 0, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 2.63905732961526, 0, 0, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 2.19722457733622, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 2.19722457733622, 0.693147180559945, 
1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 0, 1.94591014905531, 0.693147180559945, 0, 0, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0, 2.19722457733622, 1.38629436111989, 2.30258509299405, 0, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 0, 1.38629436111989, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 0, 0, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0, 0, 0, 0, 0, 1.09861228866811, 0, 0, 1.6094379124341, 0, 1.79175946922805, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 1.38629436111989, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 0, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 
0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 0, 2.19722457733622, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 2.07944154167984, 1.09861228866811, 0, 0, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.79175946922805, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 2.07944154167984, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 0, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 1.38629436111989, 1.38629436111989, 1.79175946922805, 
2.484906649788, 1.09861228866811, 1.09861228866811, 2.30258509299405, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.94591014905531, 1.79175946922805, 1.79175946922805, 0, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0, 1.09861228866811, 0, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 2.19722457733622, 0, 0, 0.693147180559945, 2.30258509299405, 1.09861228866811, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 1.79175946922805, 1.6094379124341, 0, 0.693147180559945, 1.38629436111989, 0, 2.07944154167984, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 0, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 2.19722457733622, 0.693147180559945, 1.38629436111989, 0, 0, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0, 0, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0, 0, 1.79175946922805, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 2.83321334405622, 0, 0.693147180559945, 1.38629436111989, 0, 1.38629436111989, 
1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 1.38629436111989, 0, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0, 1.94591014905531, 1.09861228866811, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 1.38629436111989, 2.07944154167984, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.38629436111989, 0, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 2.19722457733622, 1.94591014905531, 0, 1.38629436111989, 1.79175946922805, 2.39789527279837, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 2.56494935746154, 1.94591014905531, 1.38629436111989, 0, 0.693147180559945, 2.39789527279837, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 2.39789527279837, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 1.09861228866811, 
1.38629436111989, 1.94591014905531, 2.07944154167984, 1.38629436111989, 0, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.94591014905531, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 2.63905732961526, 1.38629436111989, 0.693147180559945, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.79175946922805, 1.94591014905531, 0.693147180559945, 0, 1.94591014905531, 1.09861228866811, 2.484906649788, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 2.07944154167984, 2.39789527279837, 2.30258509299405, 1.09861228866811, 1.38629436111989, 0.693147180559945, 2.484906649788, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.6094379124341, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0, 1.79175946922805, 2.30258509299405, 2.07944154167984, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0, 0, 2.56494935746154, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.07944154167984, 2.07944154167984, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 2.39789527279837, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.38629436111989, 0, 2.07944154167984, 1.6094379124341, 1.09861228866811, 
1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 2.39789527279837, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.6094379124341, 1.6094379124341, 0, 1.79175946922805, 2.30258509299405, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.63905732961526, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.79175946922805, 0, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.94591014905531, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 2.89037175789616, 1.94591014905531, 1.79175946922805, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0.693147180559945, 2.39789527279837, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0, 2.07944154167984, 1.09861228866811, 2.07944154167984, 2.07944154167984, 0.693147180559945, 2.07944154167984, 1.79175946922805, 3.09104245335832, 2.19722457733622, 0.693147180559945, 0.693147180559945, 2.30258509299405, 1.38629436111989, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0, 
1.38629436111989, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.79175946922805, 2.63905732961526, 0.693147180559945, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.09861228866811, 2.19722457733622, 1.38629436111989, 0, 1.79175946922805, 1.79175946922805, 1.6094379124341, 0, 1.38629436111989, 2.30258509299405, 1.09861228866811, 1.09861228866811, 2.30258509299405, 0, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.39789527279837, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.6094379124341, 2.07944154167984, 1.09861228866811, 2.19722457733622, 2.30258509299405, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0, 1.6094379124341, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.6094379124341, 2.07944154167984, 1.79175946922805, 1.6094379124341, 1.79175946922805, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.79175946922805, 
1.09861228866811, 0, 1.6094379124341, 0, 2.07944154167984, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.30258509299405, 0, 1.79175946922805, 2.07944154167984, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.09861228866811, 2.19722457733622, 0, 0, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.94591014905531, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.07944154167984, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.94591014905531, 0.693147180559945, 1.6094379124341, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.6094379124341, 2.07944154167984, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 2.70805020110221, 0, 0.693147180559945, 1.09861228866811, 0, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.79175946922805, 2.19722457733622, 
1.79175946922805, 2.39789527279837, 2.30258509299405, 2.30258509299405, 1.09861228866811, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.94591014905531, 2.30258509299405, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 2.39789527279837, 1.09861228866811, 2.19722457733622, 0.693147180559945, 0.693147180559945, 0, 2.30258509299405, 0, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.39789527279837, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.6094379124341, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.79175946922805, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0, 2.39789527279837, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 1.38629436111989, 1.94591014905531, 0.693147180559945, 0, 2.39789527279837, 1.38629436111989, 0, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.94591014905531, 0.693147180559945, 0, 2.484906649788, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.94591014905531, 
1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.6094379124341, 1.38629436111989, 2.07944154167984, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 0, 1.79175946922805, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 2.39789527279837, 1.09861228866811, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.6094379124341, 2.39789527279837, 0, 1.09861228866811, 2.30258509299405, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.30258509299405, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.79175946922805, 1.94591014905531, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.07944154167984, 1.38629436111989, 0.693147180559945, 0.693147180559945, 2.07944154167984, 2.07944154167984, 0.693147180559945, 2.39789527279837, 0, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.6094379124341, 0.693147180559945, 2.07944154167984, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 2.484906649788, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 0, 2.07944154167984, 
1.79175946922805, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 0, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.94591014905531, 0, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 1.79175946922805, 1.38629436111989, 1.94591014905531, 0, 0.693147180559945, 1.6094379124341, 2.484906649788, 1.6094379124341, 1.09861228866811, 1.09861228866811, 2.30258509299405, 0, 1.09861228866811, 1.38629436111989, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 2.07944154167984, 2.19722457733622, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 1.38629436111989, 0, 1.6094379124341, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 0, 0, 1.09861228866811, 0.693147180559945, 
0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0, 1.79175946922805, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.39789527279837, 2.56494935746154, 0.693147180559945, 0, 0, 0, 1.79175946922805, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0, 1.79175946922805, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.6094379124341, 1.6094379124341, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 1.09861228866811, 0, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 2.39789527279837, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 0, 0, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 0, 2.30258509299405, 1.79175946922805, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 1.94591014905531, 0, 1.6094379124341, 0, 0.693147180559945, 0, 1.38629436111989, 2.19722457733622, 1.6094379124341, 0, 1.6094379124341, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0, 0, 
2.39789527279837, 0.693147180559945, 0, 0, 1.09861228866811, 0, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 2.19722457733622, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0.693147180559945, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.94591014905531, 1.38629436111989, 0, 0, 1.38629436111989, 1.38629436111989, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 2.63905732961526, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.6094379124341, 0, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 0, 
0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 1.38629436111989, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 0, 0, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0, 0, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 2.07944154167984, 0, 0.693147180559945, 0, 0, 2.19722457733622, 0, 0, 0, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0, 2.484906649788, 1.38629436111989, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 0, 0, 1.79175946922805, 1.09861228866811, 0, 0, 0, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 2.39789527279837, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 0, 0, 0, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.38629436111989, 2.30258509299405, 0, 0, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.94591014905531, 0, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.19722457733622, 0.693147180559945, 1.79175946922805, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0, 0, 
0.693147180559945, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 1.6094379124341, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 2.30258509299405, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.38629436111989, 0, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 0, 0, 1.6094379124341, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 0, 0.693147180559945, 0, 1.09861228866811, 2.484906649788, 0, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.79175946922805, 0, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.30258509299405, 0, 1.09861228866811, 0.693147180559945, 0, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 
0.693147180559945, 1.38629436111989, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0, 2.07944154167984, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0, 0, 0, 0, 1.6094379124341, 0, 1.09861228866811, 2.07944154167984, 0, 0, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 1.38629436111989, 0, 1.79175946922805, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 0, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 0, 0, 0, 2.19722457733622, 0, 1.79175946922805, 1.38629436111989, 1.09861228866811, 0, 0, 0, 1.09861228866811, 2.39789527279837, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0, 0.693147180559945, 0, 0, 
1.09861228866811, 0, 0, 1.09861228866811, 0, 1.38629436111989, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.6094379124341, 2.07944154167984, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 2.19722457733622, 0, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.79175946922805, 0.693147180559945, 2.30258509299405, 0.693147180559945, 1.6094379124341, 1.94591014905531, 0, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 1.09861228866811, 0, 0, 1.09861228866811, 2.63905732961526, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.6094379124341, 2.77258872223978, 1.38629436111989, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 0, 1.94591014905531, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0, 0, 0, 0.693147180559945, 1.6094379124341, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0, 0, 0, 0.693147180559945 }; + var targets = new double[] { 1, 7, 7, 2, 3, 7, 7, 7, 2, 7, 7, 7, 7, 1, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 1, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 6, 7, 7, 7, 7, 7, 7, 
7, 7, 7, 3, 7, 2, 7, 7, 7, 7, 7, 5, 3, 7, 7, 7, 7, 7, 7, 3, 7, 7, 6, 7, 7, 2, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 2, 2, 7, 7, 17, 7, 2, 7, 1, 7, 7, 7, 7, 7, 7, 17, 7, 7, 7, 7, 7, 17, 7, 2, 7, 7, 1, 7, 1, 7, 7, 17, 7, 7, 1, 7, 7, 7, 7, 7, 7, 7, 2, 2, 2, 2, 1, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 0, 7, 1, 7, 1, 7, 7, 7, 7, 2, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 2, 2, 7, 2, 2, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 2, 2, 7, 2, 7, 1, 7, 2, 7, 7, 1, 7, 7, 7, 5, 2, 2, 7, 7, 2, 7, 7, 7, 7, 7, 17, 7, 2, 2, 1, 2, 7, 2, 7, 7, 2, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 2, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 2, 7, 7, 17, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 2, 7, 7, 2, 7, 14, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 1, 7, 7, 7, 7, 7, 7, 2, 7, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 7, 7, 3, 2, 2, 7, 7, 2, 2, 2, 2, 7, 7, 1, 2, 2, 2, 2, 2, 2, 17, 17, 2, 7, 7, 2, 1, 2, 2, 2, 3, 2, 2, 2, 2, 2, 3, 3, 2, 2, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 3, 1, 2, 3, 2, 2, 3, 2, 3, 3, 2, 2, 2, 2, 2, 2, 1, 3, 3, 1, 3, 3, 1, 1, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 3, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 1, 14, 2, 2, 2, 1, 3, 1, 2, 1, 2, 2, 2, 1, 1, 3, 2, 2, 1, 2, 1, 2, 7, 2, 2, 2, 1, 2, 4, 2, 2, 2, 3, 1, 3, 1, 3, 2, 3, 3, 2, 3, 2, 2, 2, 1, 2, 1, 2, 2, 3, 2, 2, 2, 1, 2, 2, 14, 2, 2, 2, 3, 3, 2, 1, 2, 3, 2, 2, 1, 3, 2, 2, 2, 2, 1, 1, 2, 2, 3, 2, 3, 3, 1, 1, 2, 2, 2, 1, 2, 3, 2, 3, 2, 2, 4, 14, 2, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 1, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 3, 3, 7, 3, 2, 2, 1, 1, 3, 2, 2, 2, 1, 2, 3, 2, 1, 2, 3, 2, 1, 2, 2, 1, 2, 2, 2, 2, 3, 1, 2, 3, 2, 3, 1, 2, 15, 2, 2, 2, 13, 3, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 1, 1, 2, 3, 2, 3, 2, 2, 2, 3, 3, 2, 3, 3, 2, 2, 2, 3, 3, 2, 2, 3, 2, 2, 3, 17, 3, 2, 2, 3, 3, 2, 2, 3, 2, 2, 3, 1, 2, 3, 3, 7, 2, 2, 3, 2, 2, 2, 2, 2, 3, 3, 2, 2, 2, 2, 3, 3, 3, 1, 3, 2, 3, 1, 3, 2, 2, 3, 3, 2, 2, 
3, 3, 3, 3, 3, 2, 3, 2, 3, 2, 3, 2, 1, 2, 2, 3, 2, 3, 1, 2, 3, 2, 2, 3, 2, 2, 1, 2, 3, 2, 2, 2, 2, 1, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 2, 1, 2, 1, 1, 13, 15, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 1, 2, 14, 1, 1, 14, 1, 16, 15, 2, 2, 3, 2, 2, 2, 2, 14, 2, 1, 2, 2, 2, 2, 14, 13, 5, 13, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 2, 13, 1, 14, 2, 1, 2, 1, 1, 2, 2, 1, 7, 2, 1, 1, 1, 1, 2, 2, 2, 3, 3, 1, 1, 2, 2, 2, 2, 14, 2, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 3, 2, 1, 2, 1, 14, 16, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 3, 2, 1, 2, 2, 14, 13, 3, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 2, 1, 2, 2, 2, 13, 14, 3, 2, 1, 3, 2, 2, 2, 6, 2, 2, 2, 1, 1, 14, 3, 13, 17, 1, 14, 2, 2, 1, 2, 3, 2, 3, 2, 2, 1, 1, 2, 2, 1, 2, 15, 3, 14, 1, 1, 2, 1, 1, 1, 1, 2, 2, 14, 1, 14, 1, 1, 2, 2, 1, 2, 2, 1, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 1, 1, 1, 16, 2, 2, 1, 2, 1, 14, 16, 14, 14, 14, 3, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 1, 1, 3, 14, 13, 2, 14, 14, 1, 3, 1, 2, 14, 2, 1, 1, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 2, 1, 2, 2, 14, 2, 2, 1, 2, 2, 3, 3, 2, 2, 2, 1, 2, 2, 2, 2, 2, 3, 2, 2, 1, 2, 2, 2, 3, 1, 4, 2, 2, 2, 2, 1, 2, 1, 3, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 13, 1, 1, 2, 1, 13, 1, 2, 2, 2, 2, 2, 16, 1, 2, 1, 2, 1, 1, 2, 2, 3, 2, 1, 2, 1, 2, 1, 2, 1, 1, 2, 3, 2, 1, 2, 2, 1, 2, 2, 3, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 5, 2, 1, 1, 2, 3, 2, 2, 2, 2, 3, 3, 2, 2, 2, 1, 1, 3, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 1, 2, 1, 1, 1, 1, 2, 1, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 1, 2, 1, 1, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 14, 2, 2, 2, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 1, 2, 2, 2, 1, 3, 2, 3, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 2, 2, 17, 2, 2, 2, 3, 2, 1, 2, 2, 2, 2, 1, 2, 1, 1, 1, 2, 2, 3, 1, 1, 2, 2, 2, 2, 2, 2, 
2, 14, 3, 1, 13, 2, 2, 1, 2, 1, 1, 2, 2, 1, 1, 6, 2, 1, 1, 2, 3, 1, 2, 7, 2, 2, 2, 1, 1, 1, 2, 2, 13, 2, 1, 2, 14, 1, 1, 1, 2, 3, 1, 2, 2, 2, 2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 1, 2, 1, 2, 6, 1, 2, 2, 2, 1, 1, 2, 1, 2, 5, 1, 2, 1, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 7, 2, 1, 1, 2, 2, 1, 2, 2, 13, 2, 2, 2, 1, 2, 3, 2, 2, 2, 2, 2, 1, 3, 3, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 17, 14, 14, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 1, 1, 1, 2, 2, 1, 2, 3, 1, 1, 2, 2, 2, 15, 2, 2, 1, 1, 16, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 14, 2, 1, 3, 1, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 3, 1, 1, 1, 1, 2, 2, 1, 3, 2, 3, 2, 2, 1, 2, 3, 14, 3, 17, 3, 2, 1, 1, 1, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 3, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 3, 1, 2, 3, 2, 3, 3, 2, 2, 13, 2, 1, 1, 1, 3, 3, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 3, 3, 2, 2, 1, 2, 2, 3, 1, 2, 3, 1, 2, 2, 3, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 3, 3, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 1, 2, 1, 2, 3, 3, 2, 2, 1, 1, 2, 2, 1, 2, 1, 2, 2, 2, 3, 3, 2, 1, 2, 2, 3, 2, 3, 3, 2, 1, 7, 2, 3, 2, 2, 2, 2, 3, 2, 3, 2, 2, 2, 3, 1, 2, 2, 2, 1, 3, 2, 2, 2, 2, 7, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 3, 2, 1, 2, 1, 2, 3, 2, 1, 2, 2, 2, 17, 2, 1, 1, 2, 1, 2, 2, 3, 2, 2, 3, 1, 1, 1, 2, 2, 2, 2, 1, 3, 3, 3, 2, 2, 2, 2, 1, 1, 3, 2, 2, 2, 2, 3, 2, 1, 7, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 2, 2, 3, 3, 3, 2, 1, 2, 2, 2, 3, 3, 2, 2, 3, 3, 2, 2, 2, 3, 3, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 3, 16, 1, 2, 1, 2, 2, 2, 2, 2, 3, 2, 2, 1, 3, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 3, 2, 1, 2, 1, 2, 1, 1, 1, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 2, 13, 2, 1, 1, 2, 1, 1, 1, 1, 2, 3, 13, 2, 2, 2, 2, 2, 3, 2, 2, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 1, 1, 2, 2, 14, 1, 3, 2, 
14, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 13, 2, 3, 2, 2, 2, 2, 2, 2, 17, 1, 1, 3, 15, 1, 2, 2, 2, 2, 14, 2, 2, 2, 14, 2, 1, 2, 2, 2, 3, 1, 2, 2, 2, 2, 1, 2, 1, 13, 1, 2, 1, 2, 2, 16, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 1, 17, 2, 2, 2, 1, 1, 1, 2, 1, 3, 13, 14, 2, 3, 2, 3, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 17, 3, 2, 1, 3, 1, 3, 6, 3, 2, 4, 3, 2, 3, 2, 2, 14, 1, 2, 14, 1, 3, 2, 2, 1, 2, 2, 3, 1, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 1, 14, 2, 1, 2, 2, 3, 2, 3, 3, 3, 2, 3, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 17, 1, 2, 15, 2, 2, 1, 3, 2, 1, 2, 2, 3, 2, 2, 16, 2, 1, 1, 2, 2, 2, 3, 1, 1, 2, 3, 2, 2, 2, 2, 3, 1, 2, 2, 3, 2, 3, 16, 2, 2, 3, 3, 3, 1, 1, 2, 2, 3, 3, 2, 2, 2, 3, 2, 2, 3, 2, 15, 14, 3, 2, 2, 2, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 3, 3, 2, 2, 3, 7, 3, 2, 2, 2, 2, 3, 2, 2, 5, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 3, 3, 3, 3, 3, 4, 2, 2, 2, 2, 2, 3, 2, 3, 3, 2, 3, 2, 13, 2, 3, 3, 14, 3, 2, 3, 2, 13, 3, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 3, 2, 3, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 3, 2, 1, 3, 3, 2, 5, 2, 2, 3, 3, 2, 3, 1, 3, 3, 2, 3, 2, 2, 3, 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 2, 2, 2, 3, 2, 3, 3, 3, 3, 2, 2, 2, 3, 2, 2, 3, 2, 2, 3, 3, 3, 3, 3, 2, 3, 3, 3, 15, 2, 2, 2, 3, 3, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 2, 2, 3, 2, 3, 3, 7, 1, 2, 2, 2, 3, 3, 2, 3, 2, 2, 2, 2, 3, 2, 3, 2, 2, 13, 2, 3, 3, 2, 2, 2, 2, 2, 2, 3, 3, 3, 2, 2, 16, 2, 2, 2, 2, 2, 3, 2, 3, 3, 2, 2, 13, 2, 3, 2, 2, 2, 2, 2, 3, 3, 14, 2, 3, 2, 3, 3, 2, 3, 2, 2, 3, 13, 2, 2, 3, 2, 2, 2, 1, 2, 2, 3, 1, 14, 2, 2, 2, 2, 1, 2, 2, 1, 3, 2, 2, 2, 3, 3, 3, 2, 1, 2, 2, 14, 3, 3, 2, 3, 2, 2, 3, 2, 2, 16, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 13, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 2, 15, 2, 2, 15, 14, 13, 2, 3, 2, 2, 3, 3, 2, 2, 16, 2, 2, 2, 2, 2, 2, 3, 2, 14, 2, 2, 14, 13, 2, 2, 2, 3, 2, 14, 2, 2, 2, 1, 2, 13, 2, 2, 2, 2, 2, 3, 13, 2, 2, 2, 2, 2, 2, 2, 2, 13, 13, 6, 
2, 3, 2, 14, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 13, 2, 2, 16, 2, 13, 13, 1, 2, 2, 3, 2, 2, 3, 3, 2, 2, 2, 2, 1, 3, 2, 3, 3, 3, 6, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 3, 2, 2, 3, 2, 3, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 2, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 3, 2, 3, 2, 2, 2, 2, 3, 2, 2, 2, 3, 3, 17, 2, 3, 3, 3, 3, 2, 2, 2, 2, 3, 2, 2, 2, 2, 13, 3, 2, 2, 3, 2, 2, 3, 2, 14, 2, 2, 17, 17, 2, 2, 1, 2, 3, 1, 2, 1, 1, 15, 2, 2, 2, 3, 13, 2, 2, 13, 2, 2, 2, 3, 3, 2, 3, 2, 2, 13, 7, 3, 2, 3, 2, 14, 2, 3, 2, 2, 2, 13, 2, 3, 3, 2, 14, 14, 2, 2, 2, 3, 2, 2, 2, 2, 13, 2, 2, 2, 13, 2, 2, 2, 2, 2, 3, 2, 13, 2, 2, 2, 2, 2, 2, 14, 3, 2, 2, 2, 2, 17, 1, 3, 2, 2, 2, 13, 2, 3, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 2, 3, 2, 13, 2, 2, 2, 3, 3, 3, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 16, 2, 2, 4, 2, 2, 16, 16, 2, 2, 3, 2, 2, 2, 3, 13, 2, 2, 3, 2, 2, 2, 1, 2, 2, 3, 2, 3, 3, 2, 2, 2, 2, 2, 3, 3, 2, 4, 2, 2, 4, 3, 2, 2, 3, 2, 3, 2, 3, 3, 3, 2, 2, 3, 14, 2, 2, 2, 4, 2, 3, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 15, 3, 14, 2, 2, 2, 3, 2, 2, 1, 3, 3, 2, 3, 2, 2, 3, 3, 14, 3, 2, 13, 2, 2, 2, 3, 2, 3, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 3, 2, 2, 13, 3, 2, 2, 3, 2, 3, 2, 2, 3, 3, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 13, 2, 13, 2, 1, 1, 14, 3, 2, 2, 2, 1, 13, 2, 2, 13, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 14, 2, 2, 13, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 14, 2, 2, 13, 2, 2, 2, 3, 2, 2, 2, 2, 3, 1, 1, 2, 3, 2, 2, 2, 2, 3, 3, 2, 13, 2, 14, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 1, 2, 2, 2, 3, 2, 2, 1, 1, 1, 3, 2, 3, 2, 2, 3, 2, 3, 2, 3, 2, 2, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 2, 1, 2, 1, 3, 2, 2, 1, 2, 2, 2, 2, 13, 13, 2, 2, 1, 1, 2, 1, 3, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 1, 2, 2, 3, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 
2, 2, 3, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 1, 2, 1, 2, 2, 2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 2, 3, 2, 3, 3, 2, 1, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 3, 1, 1, 1, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 2, 3, 2, 3, 2, 2, 14, 15, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 2, 1, 17, 2, 2, 1, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 14, 2, 2, 2, 13, 2, 3, 2, 2, 1, 2, 13, 1, 2, 1, 2, 2, 1, 2, 2, 13, 2, 3, 3, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 14, 2, 17, 13, 1, 3, 2, 3, 2, 1, 3, 3, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 14, 13, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 1, 2, 3, 2, 16, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 3, 2, 2, 1, 2, 2, 2, 2, 2, 3, 1, 13, 1, 2, 1, 2, 2, 13, 1, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 3, 14, 3, 2, 1, 2, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2, 2, 2, 1, 2, 3, 1, 2, 3, 2, 1, 1, 2, 13, 2, 2, 2, 2, 3, 3, 2, 2, 1, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 3, 16, 2, 2, 1, 2, 2, 2, 2, 3, 1, 3, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 14, 2, 2, 2, 2, 1, 2, 2, 1, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 1, 2, 2, 2, 1, 2, 1, 2, 2, 2, 1, 2, 3, 2, 1, 2, 2, 1, 2, 2, 3, 2, 2, 2, 1, 2, 2, 2, 2, 2, 3, 1, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 1, 1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 13, 2, 2, 2, 2, 2, 1, 14, 2, 2, 2, 13, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 13, 2, 2, 2, 2, 
2, 2, 2, 2, 2, 2, 2, 2, 13, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 13, 2, 2, 2, 14, 1, 2, 2, 2, 1, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 13, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 3, 3, 2, 3, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 3, 2, 1, 1, 2, 2, 1, 2, 1, 1, 3, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 14, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 1, 2, 1, 2, 2, 2, 13, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 3, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 14, 1, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 1, 1, 13, 1, 1, 1, 2, 1, 1, 1, 2, 1, 2, 2, 2, 1, 1, 2, 1, 1, 13, 7, 2, 1, 1, 2, 1, 1, 1, 2, 2, 7, 3, 1, 1, 1, 3, 2, 2, 2, 3, 2, 2, 2, 1, 2, 2, 2, 2, 13, 3, 7, 2, 7, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 3, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 2, 14, 5, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 4, 2, 2, 1, 2, 1, 2, 2, 2, 17, 1, 1, 1, 1, 1, 2, 3, 2, 2, 1, 1, 2, 2, 1, 1, 1, 2, 1, 1, 1, 6, 1, 2, 3, 2, 2, 2, 3, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 7, 17, 1, 3, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1, 1, 1, 1, 2, 14, 2, 2, 1, 7, 14, 1, 2, 1, 1, 3, 2, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 3, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 13, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 1, 7, 1, 1, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 2, 4, 3, 2, 3, 1, 1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 1, 2, 3, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 3, 1, 1, 2, 2, 2, 1, 1, 2, 7, 7, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 7, 2, 1, 2, 1, 2, 3, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 3, 2, 2, 3, 2, 2, 1, 2, 3, 2, 2, 2, 2, 3, 1, 2, 2, 1, 2, 1, 1, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 17, 2, 14, 2, 2, 2, 2, 14, 2, 2, 2, 3, 2, 1, 2, 2, 2, 2, 3, 3, 1, 3, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 3, 2, 2, 2, 2, 2, 2, 
2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 2, 3, 1, 7, 2, 3, 2, 2, 2, 2, 2, 7, 2, 2, 3, 2, 4, 2, 2, 3, 2, 3, 2, 2, 3, 7, 2, 2, 2, 5, 3, 2, 2, 2, 2, 2, 2, 14, 3, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 3, 2, 1, 2, 1, 2, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 2, 1, 2, 2, 1, 1, 1, 1, 1, 2, 1, 1, 7, 1, 1, 2, 2, 17, 2, 2, 1, 1, 2, 17, 2, 1, 13, 1, 17, 7, 2, 1, 2, 1, 13, 2, 1, 2, 2, 2, 1, 1, 2, 14, 2, 2, 17, 1, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 14, 2, 7, 1, 4, 2, 17, 13, 7, 2, 1, 2, 2, 2, 1, 2, 1, 1, 7, 13, 7, 1, 2, 13, 1, 2, 2, 2, 7, 1, 2, 2, 2, 14, 1, 2, 2, 1, 7, 2, 1, 2, 2, 2, 2, 2, 14, 2, 3, 1, 1, 2, 2, 14, 2, 2, 2, 7, 2, 17, 1, 14, 2, 2, 2, 2, 17, 2, 3, 3, 2, 1, 2, 2, 1, 1, 2, 2, 2, 1, 1, 2, 7, 7, 13, 1, 2, 2, 2, 3, 1, 3, 2, 5, 14, 7, 2, 1, 1, 7, 1, 1, 7, 1, 1, 2, 2, 17, 1, 2, 7, 7, 2, 1, 1, 13, 7, 1, 2, 1, 1, 1, 2, 2, 1, 17, 7, 1, 1, 1, 2, 7, 1, 1, 17, 1, 17, 1, 2, 1, 1, 1, 2, 13, 13, 2, 15, 7, 2, 7, 7, 2, 2, 7, 2, 1, 0, 2, 2, 14, 2, 1, 2, 1, 2, 1, 14, 14, 3, 3, 17, 2, 1, 1, 1, 1, 1, 1, 3, 2, 1, 14, 2, 1, 2, 1, 1, 3, 3, 1, 2, 1, 2, 1, 3, 3, 2, 1, 2, 2, 3, 2, 2, 3, 2, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 13, 14, 1, 3, 2, 17, 1, 2, 2, 2, 13, 2, 1, 2, 2, 1, 2, 1, 2, 1, 2, 15, 2, 2, 1, 1, 2, 2, 4, 2, 1, 2, 1, 2, 2, 3, 2, 2, 2, 2, 1, 2, 2, 3, 2, 3, 1, 2, 2, 2, 1, 2, 7, 2, 1, 1, 3, 2, 14, 2, 2, 2, 2, 3, 1, 1, 1, 13, 1, 2, 2, 3, 14, 12, 14, 2, 2, 1, 2, 1, 1, 15, 1, 2, 3, 2, 3, 14, 1, 2, 2, 1, 2, 7, 2, 1, 2, 1, 1, 2, 2, 1, 2, 13, 3, 2, 2, 1, 2, 1, 2, 3, 2, 3, 1, 1, 3, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 13, 2, 1, 12, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 14, 1, 2, 3, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 3, 3, 2, 2, 13, 3, 3, 3, 2, 3, 2, 1, 3, 1, 2, 2, 7, 2, 1, 2, 1, 2, 2, 3, 2, 14, 3, 2, 2, 1, 2, 2, 2, 3, 2, 2, 2, 3, 1, 1, 2, 14, 1, 2, 1, 1, 2, 2, 2, 2, 2, 15, 2, 3, 3, 2, 2, 3, 1, 1, 2, 2, 2, 3, 2, 14, 2, 1, 3, 3, 2, 2, 3, 2, 2, 2, 3, 4, 2, 2, 2, 12, 15, 2, 14, 14, 1, 2, 2, 2, 2, 
3, 2, 2, 2, 2, 2, 2, 1, 2, 3, 7, 2, 2, 13, 2, 13, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 3, 2, 1, 13, 2, 3, 14, 3, 2, 1, 14, 3, 2, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 3, 1, 3, 2, 2, 3, 2, 1, 14, 2, 14, 2, 2, 3, 2, 2, 1, 2, 2, 2, 2, 14, 2, 2, 2, 2, 1, 3, 2, 1, 1, 2, 2, 2, 2, 14, 2, 2, 14, 13, 2, 1, 3, 2, 1, 2, 3, 1, 1, 3, 2, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2, 2, 7, 2, 1, 2, 2, 7, 2, 2, 1, 14, 1, 3, 13, 2, 2, 1, 15, 2, 2, 2, 2, 2, 2, 1, 1, 13, 2, 2, 4, 2, 3, 2, 3, 1, 2, 2, 2, 2, 1, 2, 1, 1, 1, 1, 1, 3, 1, 1, 3, 14, 3, 2, 1, 2, 2, 2, 1, 2, 1, 2, 13, 1, 2, 2, 2, 2, 1, 2, 14, 1, 2, 2, 1, 2, 1, 1, 1, 3, 2, 2, 2, 13, 2, 1, 2, 13, 2, 2, 2, 3, 1, 1, 1, 1, 1, 14, 3, 1, 2, 2, 3, 2, 3, 2, 1, 2, 1, 14, 2, 1, 2, 1, 2, 2, 2, 2, 13, 14, 2, 13, 1, 2, 2, 2, 2, 4, 1, 1, 2, 14, 1, 2, 1, 3, 2, 1, 1, 1, 2, 2, 2, 6, 2, 2, 2, 14, 2, 2, 14, 2, 2, 1, 1, 1, 1, 2, 17, 2, 1, 2, 1, 3, 12, 1, 2, 2, 2, 1, 3, 1, 1, 2, 1, 2, 1, 2, 1, 1, 1, 14, 2, 13, 13, 1, 2, 13, 1, 1, 1, 2, 14, 1, 3, 3, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 13, 1, 1, 1, 12, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 6, 1, 13, 2, 1, 13, 13, 2, 1, 1, 1, 1, 1, 13, 3, 2, 2, 2, 2, 1, 2, 1, 2, 6, 1, 1, 3, 1, 17, 1, 12, 2, 3, 2, 1, 2, 1, 1, 1, 2, 3, 13, 3, 3, 3, 2, 1, 13, 1, 1, 14, 7, 2, 17, 1, 1, 1, 1, 14, 1, 12, 2, 7, 1, 12, 2, 2, 1, 2, 14, 13, 1, 3, 13, 2, 1, 2, 1, 2, 2, 1, 14, 2, 3, 1, 1, 17, 1, 1, 13, 1, 2, 2, 1, 13, 1, 3, 17, 1, 1, 1, 2, 13, 1, 1, 14, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 2, 17, 1, 2, 1, 17, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 3, 1, 13, 1, 1, 2, 1, 2, 1, 1, 1, 2, 1, 16, 6, 2, 1, 2, 1, 1, 17, 1, 3, 1, 2, 2, 2, 1, 2, 1, 2, 1, 3, 1, 2, 2, 17, 2, 1, 3, 1, 2, 1, 1, 12, 13, 2, 2, 2, 1, 1, 2, 2, 1, 2, 1, 2, 2, 1, 12, 13, 3, 1, 1, 1, 2, 17, 2, 2, 1, 1, 1, 1, 1, 12, 1, 2, 2, 1, 1, 1, 13, 1, 1, 1, 1, 13, 1, 14, 1, 1, 1, 1, 1, 1, 2, 2, 13, 1, 2, 1, 2, 2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 1, 2, 1, 1, 1, 1, 2, 14, 17, 2, 2, 2, 17, 1, 2, 2, 13, 1, 1, 2, 13, 2, 2, 1, 1, 2, 1, 1, 2, 1, 2, 2, 1, 1, 1, 1, 1, 1, 17, 1, 17, 3, 3, 2, 2, 
1, 13, 1, 13, 12, 2, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 2, 2, 12, 1, 17, 2, 12, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 7, 2, 2, 1, 2, 2, 1, 3, 2, 1, 1, 1, 2, 2, 17, 3, 1, 2, 2, 2, 1, 3, 17, 2, 1, 2, 1, 2, 1, 2, 1, 1, 1, 2, 12, 17, 1, 1, 2, 1, 1, 1, 2, 13, 2, 2, 1, 1, 15, 2, 17, 1, 14, 1, 1, 1, 3, 1, 2, 2, 2, 2, 2, 3, 1, 1, 2, 1, 1, 1, 2, 1, 13, 7, 1, 1, 17, 2, 14, 2, 1, 1, 2, 4, 1, 2, 1, 1, 14, 2, 2, 1, 2, 17, 17, 2, 1, 1, 2, 17, 17, 17, 2, 1, 1, 2, 3, 2, 2, 13, 3, 13, 1, 17, 17, 17, 17, 2, 7, 2, 1, 17, 1, 2, 1, 15, 3, 13, 1, 3, 1, 2, 12, 15, 3, 2, 1, 1, 3, 2, 1, 2, 1, 2, 17, 2, 1, 3, 1, 1, 3, 1, 1, 1, 2, 15, 17, 1, 13, 12, 2, 2, 1, 1, 13, 1, 1, 1, 17, 1, 1, 1, 2, 12, 2, 1, 1, 1, 1, 17, 1, 17, 1, 2, 12, 17, 17, 2, 3, 14, 15, 2, 14, 17, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 14, 1, 1, 17, 2, 2, 12, 1, 1, 1, 1, 1, 1, 1, 14, 17, 1, 1, 2, 1, 17, 3, 1, 1, 2, 13, 3, 1, 1, 1, 1, 1, 13, 1, 1, 1, 13, 1, 2, 1, 3, 2, 13, 7, 1, 14, 14, 14, 13, 17, 13, 1, 3, 1, 17, 17, 13, 1, 1, 1, 12, 17, 1, 1, 13, 15, 1, 2, 1, 1, 1, 1, 2, 1, 1, 12, 12, 13, 1, 1, 2, 12, 12, 1, 3, 17, 2, 12, 17, 1, 1, 3, 2, 1, 3, 12, 2, 2, 3, 2, 14, 14, 1, 1, 2, 12, 1, 12, 12, 14, 1, 1, 1, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 1, 2, 13, 13, 17, 2, 1, 1, 1, 2, 1, 15, 2, 17, 2, 2, 1, 1, 2, 1, 14, 2, 1, 2, 1, 1, 3, 2, 3, 1, 2, 3, 2, 1, 2, 17, 1, 2, 2, 1, 2, 2, 3, 15, 1, 3, 13, 3, 1, 1, 2, 3, 14, 1, 13, 2, 2, 1, 14, 2, 1, 2, 2, 1, 17, 12, 12, 1, 1, 2, 1, 1, 1, 2, 2, 13, 13, 1, 17, 1, 2, 17, 1, 1, 1, 1, 1, 1, 2, 14, 1, 1, 3, 13, 12, 12, 2, 1, 1, 2, 2, 3, 3, 1, 3, 1, 3, 1, 1, 2, 2, 13, 1, 2, 2, 12, 12, 2, 2, 12, 17, 1, 17, 2, 12, 17, 17, 1, 17, 12, 17, 1, 17, 2, 1, 13, 2, 2, 1, 1, 2, 2, 1, 1, 17, 2, 3, 17, 3, 1, 1, 1, 2, 1, 12, 3, 2, 2, 12, 17, 17, 12, 12, 13, 2, 3, 17, 2, 1, 3, 2, 2, 3, 17, 17, 12, 2, 3, 2, 2, 2, 3, 1, 3, 2, 1, 12, 2, 2, 3, 2, 3, 1, 2, 2, 17, 12, 7, 12, 12, 1, 1, 1, 2, 2, 1, 1, 1, 12, 13, 17, 17, 1, 17, 1, 1, 1, 1, 13, 1, 2, 17, 2, 1, 2, 2, 2, 2, 2, 12, 3, 1, 1, 2, 2, 2, 2, 1, 1, 13, 14, 1, 1, 2, 2, 2, 
13, 17, 2, 1, 2, 15, 2, 2, 1, 2, 13, 1, 2, 1, 2, 17, 2, 1, 2, 2, 1, 6, 13, 1, 1, 1, 1, 2, 1, 1, 2, 1, 17, 2, 2, 13, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 2, 2, 3, 1, 1, 17, 1, 3, 1, 2, 17, 2, 2, 1, 1, 2, 13, 1, 1, 1, 1, 1, 2, 2, 1, 2, 13, 13, 3, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 1, 2, 1, 1, 1, 2, 1, 2, 2, 2, 1, 2, 2, 13, 2, 1, 1, 1, 2, 14, 5, 2, 2, 1, 2, 1, 1, 2, 2, 2, 2, 13, 1, 2, 2, 2, 2, 2, 2, 1, 1, 3, 2, 4, 2, 2, 1, 15, 2, 2, 1, 12, 1, 3, 1, 14, 1, 2, 1, 1, 1, 2, 2, 1, 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 2, 2, 2, 1, 1, 2, 1, 2, 3, 3, 2, 2, 1, 2, 2, 1, 17, 1, 2, 15, 2, 1, 13, 2, 1, 1, 3, 2, 1, 14, 2, 1, 2, 2, 1, 2, 14, 1, 2, 16, 2, 2, 2, 1, 1, 5, 1, 2, 1, 3, 2, 2, 2, 2, 13, 2, 5, 2, 1, 2, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 2, 3, 2, 13, 3, 2, 2, 2, 2, 1, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 1, 12, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 3, 2, 2, 1, 13, 1, 1, 1, 1, 3, 1, 15, 1, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 13, 2, 14, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 3, 1, 2, 2, 17, 13, 1, 2, 1, 2, 1, 2, 1, 1, 2, 1, 2, 2, 1, 2, 2, 1, 17, 2, 1, 1, 3, 2, 2, 1, 2, 1, 1, 2, 2, 1, 2, 1, 1, 13, 2, 12, 2, 1, 15, 2, 1, 3, 1, 1, 13, 1, 2, 1, 7, 2, 1, 1, 2, 1, 14, 13, 1, 2, 1, 17, 2, 1, 1, 2, 2, 1, 1, 2, 2, 1, 2, 13, 2, 1, 3, 1, 1, 12, 1, 17, 1, 1, 1, 13, 1, 2, 1, 1, 1, 1, 2, 16, 1, 6, 1, 3, 1, 2, 1, 1, 12, 1, 12, 1, 1, 1, 1, 1, 1, 6, 1, 1, 17, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 13, 1, 1, 1, 1, 12, 1, 1, 12, 17, 1, 1, 1, 1, 1, 2, 1, 1, 12, 1, 12, 1, 1, 1, 1, 13, 1, 1, 1, 1, 12, 2, 13, 1, 1, 17, 1, 1, 1, 1, 17, 1, 1, 12, 2, 1, 1, 1, 1, 1, 1, 12, 1, 1, 2, 13, 1, 1, 17, 1, 1, 13, 1, 12, 1, 1, 15, 2, 14, 12, 1, 1, 12, 1, 6, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 15, 1, 1, 12, 1, 1, 2, 14, 1, 1, 1, 1, 12, 1, 13, 15, 13, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 12, 1, 2, 1, 1, 1, 1, 2, 2, 1, 13, 1, 1, 1, 1, 13, 1, 1, 1, 1, 2, 1, 12, 1, 12, 7, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 13, 12, 13, 1, 1, 1, 15, 1, 17, 1, 1, 12, 1, 1, 1, 1, 1, 1, 1, 1, 13, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 12, 13, 1, 17, 1, 1, 1, 15, 12, 1, 14, 1, 1, 14, 1, 1, 1, 1, 1, 1, 1, 1, 13, 1, 13, 1, 7, 1, 12, 14, 1, 13, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 14, 1, 1, 15, 1, 1, 12, 1, 13, 1, 1, 1, 1, 14, 1, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 1, 1, 12, 1, 1, 1, 13, 15, 13, 1, 1, 12, 1, 1, 12, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 1, 17, 16, 1, 1, 1, 1, 1, 1, 1, 12, 14, 1, 13, 1, 12, 1, 14, 1, 1, 1, 1, 1, 1, 14, 2, 1, 1, 1, 1, 1, 1, 1, 1, 12, 12, 1, 1, 1, 1, 1, 1, 14, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 7, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 13, 1, 14, 1, 13, 1, 1, 12, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 13, 13, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 1, 17, 6, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 13, 1, 1, 13, 12, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 12, 1, 1, 1, 1, 1, 1, 12, 1, 12, 1, 1, 1, 1, 15, 1, 6, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 6, 1, 1, 2, 1, 2, 7, 1, 17, 2, 1, 1, 1, 1, 1, 2, 12, 2, 2, 2, 1, 14, 2, 17, 15, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 15, 17, 7, 2, 17, 17, 2, 1, 3, 1, 2, 2, 17, 13, 14, 1, 2, 1, 1, 2, 14, 2, 13, 1, 1, 1, 15, 7, 1, 2, 1, 1, 2, 1, 3, 1, 1, 17, 1, 13, 17, 13, 17, 3, 1, 1, 14, 2, 2, 2, 2, 14, 2, 2, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 17, 1, 1, 17, 1, 2, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 2, 3, 1, 17, 1, 1, 13, 17, 1, 2, 1, 2, 2, 1, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 13, 1, 12, 1, 17, 1, 2, 1, 3, 1, 1, 2, 2, 1, 1, 2, 1, 17, 14, 1, 2, 2, 2, 1, 17, 17, 13, 1, 1, 1, 2, 17, 2, 2, 2, 1, 2, 2, 17, 1, 2, 1, 2, 1, 2, 2, 1, 2, 1, 1, 2, 1, 1, 2, 2, 1, 3, 1, 1, 14, 1, 1, 1, 14, 1, 1, 1, 1, 1, 13, 17, 2, 1, 2, 2, 1, 2, 17, 1, 1, 1, 1, 14, 1, 2, 1, 1, 17, 2, 1, 1, 1, 1, 17, 1, 1, 1, 2, 1, 2, 1, 2, 
2, 2, 7, 1, 1, 17, 17, 2, 2, 1, 13, 2, 1, 1, 1, 1, 1, 1, 7, 17, 1, 2, 1, 1, 1, 2, 1, 1, 2, 1, 1, 1, 2, 1, 1, 13, 1, 1, 2, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 2, 1, 12, 1, 3, 2, 2, 2, 1, 3, 2, 15, 1, 2, 1, 3, 13, 1, 1, 2, 1, 1, 1, 7, 7, 1, 2, 1, 1, 1, 1, 1, 15, 14, 1, 2, 17, 1, 3, 2, 12, 13, 1, 13, 17, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 7, 17, 1, 2, 15, 1, 2, 7, 1, 1, 2, 1, 17, 1, 1, 17, 2, 13, 1, 1, 1, 2, 2, 13, 1, 1, 1, 1, 1, 17, 1, 2, 2, 14, 1, 13, 1, 1, 2, 1, 1, 1, 13, 1, 2, 1, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 2, 2, 1, 2, 3, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 17, 1, 2, 1, 1, 3, 1, 1, 2, 2, 3, 1, 1, 2, 2, 1, 2, 1, 2, 2, 3, 1, 2, 1, 3, 2, 2, 1, 1, 1, 13, 1, 1, 1, 1, 2, 1, 1, 1, 3, 3, 1, 1, 1, 2, 1, 1, 2, 2, 2, 2, 1, 1, 13, 1, 1, 7, 2, 2, 1, 2, 13, 2, 1, 1, 2, 1, 1, 15, 1, 2, 12, 2, 1, 13, 1, 1, 13, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 13, 2, 1, 1, 1, 2, 1, 2, 1, 13, 13, 2, 1, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 13, 2, 1, 1, 2, 15, 1, 2, 1, 1, 2, 1, 1, 13, 1, 1, 2, 13, 1, 1, 2, 1, 1, 2, 1, 1, 1, 2, 1, 13, 2, 2, 2, 15, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 13, 2, 1, 1, 7, 2, 2, 1, 3, 2, 2, 13, 1, 13, 1, 15, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 2, 2, 2, 1, 2, 15, 2, 1, 1, 2, 1, 1, 1, 2, 1, 2, 3, 1, 13, 2, 2, 15, 2, 13, 1, 13, 2, 2, 2, 3, 1, 2, 1, 15, 2, 13, 1, 2, 1, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 7, 2, 2, 1, 1, 2, 15, 3, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 2, 2, 17, 2, 2, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 2, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 3, 13, 1, 1, 1, 1, 1, 2, 1, 1, 13, 2, 1, 1, 2, 1, 1, 1, 15, 1, 1, 2, 1, 1, 2, 1, 14, 1, 14, 2, 13, 1, 2, 2, 2, 2, 2, 1, 2, 13, 1, 2, 1, 2, 1, 1, 1, 2, 1, 2, 14, 1, 1, 1, 2, 3, 1, 2, 1, 2, 2, 1, 1, 1, 1, 2, 1, 12, 1, 13, 12, 3, 14, 1, 1, 1, 1, 1, 2, 14, 1, 1, 1, 1, 2, 1, 1, 2, 1, 2, 1, 1, 3, 2, 1, 1, 1, 1, 17, 2, 2, 1, 2, 1, 1, 1, 2, 3, 3, 3, 1, 2, 2, 2, 3, 2, 3, 1, 1, 1, 2, 1, 1, 1, 2, 1, 2, 2, 1, 1, 1, 2, 1, 3, 2, 1, 1, 2, 1, 1, 1, 1, 3, 1, 
1, 2, 3, 2, 3, 3, 2, 2, 1, 1, 2, 2, 2, 1, 1, 1, 2, 2, 1, 2, 1, 13, 1, 1, 3, 3, 2, 3, 12, 13, 1, 2, 1, 1, 13, 1, 13, 14, 2, 1, 2, 3, 3, 1, 1, 1, 1, 1, 13, 1, 1, 2, 1, 1, 1, 1, 1, 13, 1, 1, 15, 13, 1, 2, 1, 2, 1, 1, 2, 1, 1, 1, 1, 13, 2, 1, 1, 2, 2, 1, 13, 14, 1, 1, 2, 1, 1, 13, 2, 12, 13, 13, 2, 13, 1, 1, 1, 13, 1, 2, 1, 1, 1, 1, 2, 1, 13, 2, 1, 1, 1, 13, 13, 1, 2, 17, 2, 1, 1, 1, 1, 14, 1, 1, 2, 1, 12, 2, 1, 2, 1, 17, 1, 1, 1, 13, 1, 2, 17, 1, 2, 13, 1, 1, 14, 17, 1, 13, 2, 1, 2, 1, 1, 1, 12, 1, 17, 3, 2, 5, 13, 1, 1, 1, 7, 13, 1, 1, 2, 2, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 1, 2, 2, 2, 1, 2, 2, 1, 13, 2, 1, 1, 14, 1, 2, 2, 1, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 17, 1, 1, 1, 2, 1, 13, 1, 2, 1, 17, 14, 1, 1, 1, 2, 1, 12, 1, 13, 1, 2, 1, 1, 1, 2, 13, 1, 3, 12, 17, 1, 2, 1, 1, 1, 12, 2, 12, 13, 1, 17, 2, 2, 2, 1, 1, 1, 7, 1, 1, 2, 2, 1, 1, 2, 2, 1, 2, 13, 1, 1, 1, 15, 2, 17, 1, 2, 13, 1, 1, 13, 17, 1, 1, 1, 2, 14, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 14, 1, 17, 1, 2, 2, 2, 2, 14, 17, 17, 17, 17, 2, 17, 1, 1, 2, 2, 13, 12, 1, 13, 1, 1, 2, 1, 2, 13, 12, 2, 1, 1, 17, 1, 1, 1, 1, 3, 1, 17, 1, 1, 2, 1, 2, 1, 1, 17, 2, 1, 17, 14, 17, 1, 2, 1, 14, 1, 3, 1, 13, 2, 1, 1, 2, 17, 1, 13, 1, 1, 17, 1, 1, 1, 13, 2, 14, 17, 1, 2, 17, 2, 2, 17, 17, 1, 17, 17, 14, 17, 1, 1, 2, 14, 17, 1, 2, 2, 17, 1, 17, 1, 17, 2, 17, 17, 13, 17, 17, 1, 2, 1, 17, 1, 13, 17, 13, 17, 2, 13, 17, 17, 1, 17, 17, 2, 1, 13, 1, 2, 17, 14, 2, 1, 1, 2, 17, 1, 2, 2, 17, 12, 17, 2, 17, 13, 1, 2, 13, 2, 14, 2, 3, 17, 15, 14, 1, 12, 1, 2, 1, 2, 1, 17, 2, 13, 13, 1, 1, 17, 2, 1, 1, 1, 1, 2, 12, 14, 14, 14, 1, 2, 17, 13, 15, 1, 1, 13, 1, 14, 14, 2, 14, 1, 2, 1, 2, 2, 2, 1, 1, 16, 17, 1, 2, 12, 15, 1, 1, 1, 2, 2, 17, 2, 13, 17, 17, 1, 17, 17, 17, 12, 1, 17, 1, 7, 17, 17, 2, 2, 1, 1, 17, 17, 1, 2, 2, 17, 2, 17, 2, 14, 17, 1, 12, 17, 7, 17, 2, 13, 13, 1, 1, 17, 1, 17, 17, 17, 1, 7, 17, 1, 13, 1, 2, 2, 13, 13, 1, 12, 1, 1, 12, 12, 12, 12, 17, 1, 12, 2, 3, 14, 14, 2, 2, 15, 1, 1, 17, 2, 13, 13, 2, 14, 2, 5, 2, 1, 1, 14, 2, 1, 
1, 13, 2, 3, 1, 1, 12, 1, 13, 1, 1, 2, 1, 13, 1, 2, 1, 12, 12, 1, 1, 2, 2, 1, 2, 2, 14, 14, 2, 2, 2, 2, 1, 2, 2, 15, 13, 6, 1, 1, 1, 1, 12, 2, 12, 14, 1, 1, 1, 1, 12, 1, 2, 2, 1, 12, 1, 1, 2, 1, 1, 1, 12, 1, 1, 12, 15, 1, 14, 1, 1, 1, 2, 13, 14, 1, 1, 6, 1, 12, 1, 1, 15, 1, 2, 2, 12, 12, 1, 1, 2, 2, 12, 1, 1, 1, 1, 1, 13, 1, 1, 12, 1, 1, 2, 2, 17, 12, 2, 15, 1, 1, 14, 2, 1, 17, 1, 13, 1, 12, 12, 2, 1, 1, 1, 1, 1, 15, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 1, 13, 1, 1, 1, 12, 1, 15, 2, 12, 1, 15, 1, 1, 1, 1, 1, 2, 2, 2, 14, 1, 2, 3, 1, 2, 1, 2, 2, 1, 1, 2, 1, 1, 1, 1, 1, 1, 3, 1, 2, 2, 1, 1, 1, 2, 15, 1, 2, 15, 2, 12, 2, 13, 1, 1, 17, 13, 1, 12, 2, 1, 1, 1, 1, 1, 14, 1, 12, 2, 1, 1, 5, 2, 2, 13, 13, 14, 1, 15, 2, 2, 2, 13, 2, 1, 2, 5, 1, 1, 1, 2, 1, 1, 1, 1, 2, 2, 1, 3, 1, 1, 2, 1, 2, 2, 15, 12, 1, 1, 1, 1, 17, 1, 2, 2, 3, 1, 1, 2, 2, 2, 17, 1, 1, 1, 1, 12, 1, 2, 3, 17, 1, 1, 1, 3, 1, 15, 1, 12, 12, 2, 2, 2, 1, 1, 2, 1, 1, 1, 14, 2, 2, 15, 1, 2, 2, 1, 1, 15, 1, 1, 1, 1, 1, 2, 1, 1, 17, 2, 1, 12, 1, 2, 1, 1, 3, 1, 1, 1, 14, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 17, 1, 1, 1, 1, 1, 12, 12, 2, 1, 1, 1, 1, 13, 2, 13, 2, 1, 2, 1, 2, 1, 1, 6, 1, 2, 1, 1, 14, 2, 15, 1, 6, 1, 2, 2, 1, 6, 1, 1 }; var interval = Interval1D.Create(0, feature.Length); Array.Sort(feature, targets); @@ -123,9 +122,6 @@ public void LinearSplitSearcher_FindBestSplit_Large() static double Weight(double v, double weight) { - if (v == 1.0) - return weight; - return 1.0; + return v == 1.0 ? 
weight : 1.0; } - } diff --git a/src/SharpLearning.DecisionTrees.Test/SplitSearchers/OnlyUniqueThresholdsSplitSearcherTest.cs b/src/SharpLearning.DecisionTrees.Test/SplitSearchers/OnlyUniqueThresholdsSplitSearcherTest.cs index 33f96805..882af2ce 100644 --- a/src/SharpLearning.DecisionTrees.Test/SplitSearchers/OnlyUniqueThresholdsSplitSearcherTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/SplitSearchers/OnlyUniqueThresholdsSplitSearcherTest.cs @@ -71,7 +71,6 @@ public void OnlyUniqueThresholdsSplitSearcher_FindBestSplit_Weight() Assert.AreEqual(expected, actual); } - [TestMethod] public void OnlyUniqueThresholdsSplitSearcher_FindBestSplit_DecisionTreeData() { @@ -100,8 +99,8 @@ public void OnlyUniqueThresholdsSplitSearcher_FindBestSplit_DecisionTreeData() [TestMethod] public void OnlyUniqueThresholdsSplitSearcher_FindBestSplit_Large() { - var feature = new double[] { 0, 0.693147180559945, 0, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.09861228866811, 0, 0, 0, 1.09861228866811, 0, 0, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 2.07944154167984, 0, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 1.6094379124341, 0, 1.79175946922805, 1.38629436111989, 1.6094379124341, 0, 0, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 
1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 0, 0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 0, 2.19722457733622, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.79175946922805, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0, 0, 0, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 0, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0, 0, 1.6094379124341, 1.38629436111989, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.94591014905531, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0, 2.56494935746154, 0.693147180559945, 0, 0, 0, 0, 0, 1.6094379124341, 
0, 0, 0, 0, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0, 2.56494935746154, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 2.07944154167984, 2.30258509299405, 1.38629436111989, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0, 0, 0, 0, 0, 1.09861228866811, 1.09861228866811, 0, 2.19722457733622, 2.30258509299405, 2.70805020110221, 2.07944154167984, 1.94591014905531, 2.39789527279837, 2.30258509299405, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 2.77258872223978, 2.19722457733622, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.09861228866811, 2.19722457733622, 2.30258509299405, 1.79175946922805, 2.07944154167984, 1.94591014905531, 1.38629436111989, 2.19722457733622, 3.04452243772342, 2.07944154167984, 2.19722457733622, 2.07944154167984, 2.484906649788, 0.693147180559945, 1.94591014905531, 1.6094379124341, 2.56494935746154, 1.38629436111989, 2.63905732961526, 1.94591014905531, 2.19722457733622, 1.94591014905531, 1.09861228866811, 1.38629436111989, 1.79175946922805, 2.99573227355399, 0, 2.484906649788, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.19722457733622, 2.56494935746154, 1.79175946922805, 2.83321334405622, 0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.79175946922805, 2.30258509299405, 2.56494935746154, 2.39789527279837, 1.79175946922805, 
2.19722457733622, 1.38629436111989, 1.94591014905531, 2.77258872223978, 2.484906649788, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 2.484906649788, 0, 0.693147180559945, 0.693147180559945, 2.30258509299405, 1.94591014905531, 2.56494935746154, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.09861228866811, 0, 1.09861228866811, 2.19722457733622, 2.07944154167984, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.94591014905531, 1.6094379124341, 0.693147180559945, 2.77258872223978, 2.484906649788, 1.38629436111989, 2.39789527279837, 2.39789527279837, 2.19722457733622, 0.693147180559945, 1.6094379124341, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.19722457733622, 2.484906649788, 1.38629436111989, 2.07944154167984, 1.38629436111989, 1.38629436111989, 2.89037175789616, 1.6094379124341, 2.56494935746154, 1.38629436111989, 2.19722457733622, 1.38629436111989, 2.30258509299405, 2.07944154167984, 1.79175946922805, 2.56494935746154, 1.38629436111989, 2.77258872223978, 1.79175946922805, 2.39789527279837, 1.79175946922805, 1.09861228866811, 1.94591014905531, 1.79175946922805, 2.19722457733622, 1.6094379124341, 2.56494935746154, 1.38629436111989, 0.693147180559945, 2.07944154167984, 1.94591014905531, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.39789527279837, 2.39789527279837, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0, 1.79175946922805, 2.83321334405622, 1.79175946922805, 1.79175946922805, 0, 2.30258509299405, 1.38629436111989, 2.77258872223978, 1.94591014905531, 1.09861228866811, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.6094379124341, 2.30258509299405, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.79175946922805, 2.07944154167984, 2.39789527279837, 1.79175946922805, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 1.79175946922805, 2.30258509299405, 1.94591014905531, 
1.09861228866811, 3.2188758248682, 1.94591014905531, 1.79175946922805, 0, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.94591014905531, 1.38629436111989, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.09861228866811, 2.56494935746154, 1.6094379124341, 1.09861228866811, 1.94591014905531, 2.07944154167984, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.83321334405622, 1.94591014905531, 2.39789527279837, 2.30258509299405, 2.39789527279837, 1.94591014905531, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.09861228866811, 0.693147180559945, 1.6094379124341, 2.63905732961526, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.94591014905531, 2.30258509299405, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.99573227355399, 2.07944154167984, 2.77258872223978, 1.6094379124341, 0, 2.07944154167984, 1.09861228866811, 1.79175946922805, 2.39789527279837, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.19722457733622, 2.30258509299405, 2.484906649788, 1.94591014905531, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 2.63905732961526, 1.6094379124341, 1.94591014905531, 1.6094379124341, 2.63905732961526, 2.56494935746154, 1.94591014905531, 2.484906649788, 1.79175946922805, 2.07944154167984, 2.07944154167984, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.94591014905531, 0, 1.38629436111989, 2.484906649788, 2.39789527279837, 1.94591014905531, 2.39789527279837, 2.07944154167984, 1.6094379124341, 2.63905732961526, 1.79175946922805, 2.63905732961526, 2.39789527279837, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 1.79175946922805, 1.79175946922805, 2.63905732961526, 2.19722457733622, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.39789527279837, 1.38629436111989, 2.19722457733622, 1.79175946922805, 2.19722457733622, 1.09861228866811, 1.09861228866811, 2.19722457733622, 2.39789527279837, 1.09861228866811, 1.38629436111989, 
1.79175946922805, 2.63905732961526, 0, 1.6094379124341, 1.79175946922805, 0, 2.63905732961526, 2.07944154167984, 1.38629436111989, 1.79175946922805, 2.63905732961526, 0.693147180559945, 1.79175946922805, 1.6094379124341, 2.19722457733622, 0, 2.39789527279837, 0.693147180559945, 1.79175946922805, 2.39789527279837, 2.63905732961526, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.70805020110221, 1.09861228866811, 1.79175946922805, 1.09861228866811, 2.56494935746154, 2.19722457733622, 1.6094379124341, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 2.56494935746154, 1.38629436111989, 1.38629436111989, 2.484906649788, 2.30258509299405, 2.30258509299405, 0, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.30258509299405, 1.94591014905531, 1.79175946922805, 0, 2.77258872223978, 2.56494935746154, 2.30258509299405, 2.19722457733622, 2.30258509299405, 1.6094379124341, 2.30258509299405, 1.79175946922805, 2.19722457733622, 0.693147180559945, 1.79175946922805, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.09861228866811, 2.07944154167984, 1.09861228866811, 2.30258509299405, 0.693147180559945, 2.83321334405622, 2.83321334405622, 2.83321334405622, 2.07944154167984, 2.77258872223978, 2.30258509299405, 1.38629436111989, 0.693147180559945, 2.56494935746154, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.79175946922805, 3.09104245335832, 2.30258509299405, 2.56494935746154, 1.38629436111989, 2.07944154167984, 2.19722457733622, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.89037175789616, 2.19722457733622, 1.79175946922805, 2.07944154167984, 1.09861228866811, 0, 0, 0, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.70805020110221, 2.83321334405622, 1.79175946922805, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.94591014905531, 
1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 2.39789527279837, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.07944154167984, 2.56494935746154, 2.07944154167984, 1.38629436111989, 2.70805020110221, 1.6094379124341, 2.484906649788, 1.79175946922805, 1.94591014905531, 2.70805020110221, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 0, 0, 1.38629436111989, 2.63905732961526, 1.79175946922805, 0.693147180559945, 1.94591014905531, 1.79175946922805, 2.07944154167984, 2.56494935746154, 0.693147180559945, 2.70805020110221, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.6094379124341, 0, 1.38629436111989, 2.30258509299405, 1.79175946922805, 0, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.484906649788, 2.30258509299405, 1.6094379124341, 1.79175946922805, 3.17805383034795, 2.70805020110221, 1.6094379124341, 2.19722457733622, 0.693147180559945, 2.70805020110221, 2.07944154167984, 2.63905732961526, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.56494935746154, 2.07944154167984, 1.94591014905531, 2.39789527279837, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0, 1.09861228866811, 2.484906649788, 2.19722457733622, 0, 1.94591014905531, 2.07944154167984, 0, 0, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.38629436111989, 2.39789527279837, 2.56494935746154, 0.693147180559945, 2.30258509299405, 1.94591014905531, 2.07944154167984, 2.30258509299405, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.6094379124341, 0.693147180559945, 0.693147180559945, 2.83321334405622, 2.70805020110221, 0.693147180559945, 1.94591014905531, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0, 2.484906649788, 1.38629436111989, 1.79175946922805, 2.484906649788, 
0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.77258872223978, 2.63905732961526, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.89037175789616, 0, 0, 2.39789527279837, 1.38629436111989, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.39789527279837, 2.56494935746154, 1.6094379124341, 2.56494935746154, 2.63905732961526, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.6094379124341, 0, 0, 0, 0, 0.693147180559945, 1.94591014905531, 2.39789527279837, 0.693147180559945, 1.6094379124341, 1.94591014905531, 2.56494935746154, 2.39789527279837, 2.63905732961526, 0, 1.09861228866811, 0, 0, 2.07944154167984, 1.79175946922805, 2.63905732961526, 0.693147180559945, 1.09861228866811, 2.39789527279837, 0.693147180559945, 2.63905732961526, 2.07944154167984, 1.79175946922805, 1.38629436111989, 0, 1.79175946922805, 2.19722457733622, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.6094379124341, 2.39789527279837, 2.30258509299405, 1.09861228866811, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.6094379124341, 0.693147180559945, 2.30258509299405, 0, 1.94591014905531, 2.30258509299405, 1.94591014905531, 0.693147180559945, 2.77258872223978, 0, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 2.77258872223978, 2.63905732961526, 2.63905732961526, 1.6094379124341, 2.19722457733622, 2.484906649788, 2.484906649788, 1.79175946922805, 0.693147180559945, 2.484906649788, 2.30258509299405, 2.39789527279837, 2.484906649788, 1.94591014905531, 2.30258509299405, 1.6094379124341, 1.94591014905531, 1.09861228866811, 2.56494935746154, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 1.79175946922805, 0, 0, 0, 1.79175946922805, 0, 1.6094379124341, 0, 1.94591014905531, 
1.38629436111989, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0, 2.56494935746154, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.484906649788, 0, 1.38629436111989, 1.94591014905531, 2.19722457733622, 2.19722457733622, 1.79175946922805, 1.79175946922805, 2.484906649788, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.94591014905531, 2.19722457733622, 1.94591014905531, 2.19722457733622, 3.04452243772342, 0.693147180559945, 2.30258509299405, 2.56494935746154, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.09861228866811, 2.94443897916644, 1.79175946922805, 2.83321334405622, 2.07944154167984, 1.38629436111989, 1.38629436111989, 2.19722457733622, 2.30258509299405, 2.30258509299405, 1.94591014905531, 1.94591014905531, 2.99573227355399, 2.30258509299405, 2.30258509299405, 2.30258509299405, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.39789527279837, 2.83321334405622, 1.09861228866811, 0, 2.30258509299405, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.30258509299405, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.94591014905531, 2.39789527279837, 1.94591014905531, 3.04452243772342, 1.94591014905531, 2.484906649788, 0.693147180559945, 2.07944154167984, 1.6094379124341, 2.77258872223978, 0, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0, 2.63905732961526, 1.38629436111989, 0, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.94591014905531, 2.484906649788, 1.79175946922805, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.99573227355399, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.07944154167984, 2.39789527279837, 0.693147180559945, 2.19722457733622, 1.79175946922805, 0.693147180559945, 
1.6094379124341, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 3.04452243772342, 1.94591014905531, 0.693147180559945, 2.19722457733622, 0, 0, 0.693147180559945, 0, 2.56494935746154, 0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.39789527279837, 1.38629436111989, 2.07944154167984, 2.77258872223978, 1.6094379124341, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.09861228866811, 2.30258509299405, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.6094379124341, 2.39789527279837, 2.07944154167984, 1.79175946922805, 2.63905732961526, 1.6094379124341, 2.94443897916644, 1.38629436111989, 2.19722457733622, 2.484906649788, 1.09861228866811, 1.38629436111989, 1.38629436111989, 2.89037175789616, 1.94591014905531, 1.09861228866811, 1.09861228866811, 2.94443897916644, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0.693147180559945, 2.39789527279837, 1.38629436111989, 2.94443897916644, 1.38629436111989, 1.38629436111989, 1.6094379124341, 2.56494935746154, 3.04452243772342, 2.39789527279837, 2.30258509299405, 1.94591014905531, 2.19722457733622, 1.38629436111989, 1.38629436111989, 0, 1.38629436111989, 1.6094379124341, 2.07944154167984, 2.07944154167984, 2.30258509299405, 1.38629436111989, 1.09861228866811, 2.77258872223978, 1.38629436111989, 3.04452243772342, 2.39789527279837, 1.38629436111989, 0.693147180559945, 1.94591014905531, 2.30258509299405, 2.63905732961526, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.94591014905531, 1.38629436111989, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.6094379124341, 1.6094379124341, 0.693147180559945, 2.77258872223978, 0, 2.19722457733622, 2.484906649788, 1.09861228866811, 2.56494935746154, 0.693147180559945, 1.6094379124341, 2.39789527279837, 0.693147180559945, 2.70805020110221, 
1.09861228866811, 1.6094379124341, 2.30258509299405, 2.07944154167984, 0.693147180559945, 2.83321334405622, 1.94591014905531, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 1.94591014905531, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.6094379124341, 2.19722457733622, 2.63905732961526, 1.6094379124341, 2.19722457733622, 1.94591014905531, 2.484906649788, 2.70805020110221, 1.09861228866811, 2.07944154167984, 2.39789527279837, 2.484906649788, 2.39789527279837, 1.38629436111989, 2.19722457733622, 2.07944154167984, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.83321334405622, 1.09861228866811, 2.94443897916644, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.6094379124341, 3.09104245335832, 1.79175946922805, 0.693147180559945, 1.09861228866811, 2.94443897916644, 0.693147180559945, 0.693147180559945, 2.39789527279837, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.94591014905531, 0.693147180559945, 2.07944154167984, 1.94591014905531, 0, 2.07944154167984, 1.6094379124341, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.94591014905531, 0, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.07944154167984, 2.07944154167984, 0, 2.07944154167984, 2.07944154167984, 1.79175946922805, 2.30258509299405, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.07944154167984, 2.83321334405622, 2.07944154167984, 2.07944154167984, 2.484906649788, 1.6094379124341, 1.09861228866811, 2.30258509299405, 2.30258509299405, 2.70805020110221, 2.19722457733622, 1.79175946922805, 2.19722457733622, 1.94591014905531, 2.39789527279837, 2.39789527279837, 2.07944154167984, 1.79175946922805, 2.63905732961526, 1.6094379124341, 2.30258509299405, 1.6094379124341, 0.693147180559945, 2.30258509299405, 2.30258509299405, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.07944154167984, 
2.63905732961526, 2.07944154167984, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.07944154167984, 0.693147180559945, 1.38629436111989, 1.09861228866811, 2.77258872223978, 2.39789527279837, 1.6094379124341, 0.693147180559945, 1.6094379124341, 0, 2.99573227355399, 0.693147180559945, 0.693147180559945, 1.79175946922805, 2.484906649788, 1.38629436111989, 0, 2.07944154167984, 0, 1.09861228866811, 1.09861228866811, 2.19722457733622, 1.94591014905531, 1.94591014905531, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.94591014905531, 2.30258509299405, 2.07944154167984, 1.6094379124341, 1.94591014905531, 2.07944154167984, 2.07944154167984, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.484906649788, 1.09861228866811, 1.6094379124341, 1.94591014905531, 2.70805020110221, 1.09861228866811, 2.63905732961526, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 2.19722457733622, 2.07944154167984, 1.09861228866811, 1.94591014905531, 2.39789527279837, 2.99573227355399, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.70805020110221, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 2.63905732961526, 2.63905732961526, 1.94591014905531, 1.79175946922805, 2.07944154167984, 1.79175946922805, 2.07944154167984, 2.484906649788, 1.94591014905531, 1.6094379124341, 2.07944154167984, 1.38629436111989, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.09861228866811, 1.6094379124341, 2.94443897916644, 1.79175946922805, 0, 0.693147180559945, 0, 2.19722457733622, 0.693147180559945, 1.94591014905531, 2.07944154167984, 1.79175946922805, 1.38629436111989, 2.19722457733622, 1.09861228866811, 1.94591014905531, 
2.19722457733622, 1.09861228866811, 2.07944154167984, 1.6094379124341, 2.19722457733622, 1.94591014905531, 0.693147180559945, 1.94591014905531, 2.19722457733622, 3.04452243772342, 1.6094379124341, 2.39789527279837, 0, 2.77258872223978, 1.6094379124341, 2.30258509299405, 1.38629436111989, 1.6094379124341, 2.07944154167984, 2.30258509299405, 1.38629436111989, 0.693147180559945, 2.39789527279837, 0, 1.94591014905531, 2.19722457733622, 1.09861228866811, 1.79175946922805, 2.07944154167984, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0, 2.19722457733622, 1.09861228866811, 2.30258509299405, 2.07944154167984, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.19722457733622, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.6094379124341, 2.30258509299405, 2.07944154167984, 1.09861228866811, 2.70805020110221, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 2.07944154167984, 1.79175946922805, 2.39789527279837, 1.94591014905531, 1.79175946922805, 2.39789527279837, 1.6094379124341, 2.39789527279837, 2.07944154167984, 1.09861228866811, 1.6094379124341, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.09861228866811, 2.19722457733622, 1.6094379124341, 2.07944154167984, 0, 1.6094379124341, 2.89037175789616, 1.38629436111989, 2.99573227355399, 2.30258509299405, 1.94591014905531, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.94591014905531, 2.30258509299405, 2.56494935746154, 2.77258872223978, 0.693147180559945, 2.99573227355399, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.6094379124341, 2.63905732961526, 0, 3.36729582998647, 1.6094379124341, 2.07944154167984, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.63905732961526, 0.693147180559945, 1.94591014905531, 2.56494935746154, 1.79175946922805, 2.30258509299405, 1.94591014905531, 
1.38629436111989, 1.38629436111989, 1.94591014905531, 1.94591014905531, 2.19722457733622, 2.70805020110221, 2.07944154167984, 2.70805020110221, 0, 1.38629436111989, 2.07944154167984, 1.79175946922805, 2.19722457733622, 1.94591014905531, 2.30258509299405, 2.30258509299405, 0.693147180559945, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.6094379124341, 2.07944154167984, 2.30258509299405, 2.07944154167984, 2.77258872223978, 1.79175946922805, 1.09861228866811, 1.79175946922805, 2.56494935746154, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.79175946922805, 2.30258509299405, 2.07944154167984, 2.07944154167984, 1.94591014905531, 0, 2.19722457733622, 1.79175946922805, 1.79175946922805, 1.94591014905531, 2.39789527279837, 2.30258509299405, 1.6094379124341, 2.484906649788, 2.39789527279837, 1.79175946922805, 2.77258872223978, 2.56494935746154, 1.09861228866811, 1.38629436111989, 2.484906649788, 2.19722457733622, 1.6094379124341, 1.09861228866811, 2.39789527279837, 1.09861228866811, 2.39789527279837, 1.94591014905531, 3.29583686600433, 1.38629436111989, 2.56494935746154, 0.693147180559945, 1.38629436111989, 2.19722457733622, 2.56494935746154, 2.19722457733622, 1.6094379124341, 2.19722457733622, 1.38629436111989, 1.94591014905531, 2.77258872223978, 1.38629436111989, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.30258509299405, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.09861228866811, 2.07944154167984, 2.70805020110221, 2.07944154167984, 1.79175946922805, 2.19722457733622, 2.63905732961526, 0.693147180559945, 2.19722457733622, 0.693147180559945, 2.19722457733622, 0.693147180559945, 2.07944154167984, 1.94591014905531, 2.70805020110221, 1.6094379124341, 1.6094379124341, 1.79175946922805, 2.39789527279837, 2.70805020110221, 1.09861228866811, 2.484906649788, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 1.6094379124341, 0, 2.63905732961526, 2.30258509299405, 
1.09861228866811, 1.94591014905531, 0, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.79175946922805, 1.6094379124341, 2.07944154167984, 1.38629436111989, 2.39789527279837, 1.94591014905531, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.79175946922805, 1.09861228866811, 2.70805020110221, 2.484906649788, 1.94591014905531, 1.6094379124341, 1.38629436111989, 2.19722457733622, 1.6094379124341, 2.77258872223978, 2.77258872223978, 2.39789527279837, 2.39789527279837, 0.693147180559945, 2.484906649788, 2.484906649788, 1.6094379124341, 2.39789527279837, 1.09861228866811, 0.693147180559945, 2.19722457733622, 2.39789527279837, 2.484906649788, 2.19722457733622, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.07944154167984, 2.30258509299405, 2.30258509299405, 2.19722457733622, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 3.09104245335832, 2.70805020110221, 2.19722457733622, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.56494935746154, 1.09861228866811, 1.94591014905531, 2.70805020110221, 2.484906649788, 1.38629436111989, 2.19722457733622, 1.6094379124341, 2.484906649788, 1.38629436111989, 2.30258509299405, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.79175946922805, 2.19722457733622, 1.38629436111989, 0.693147180559945, 2.39789527279837, 0, 2.07944154167984, 2.94443897916644, 2.89037175789616, 2.30258509299405, 1.38629436111989, 1.6094379124341, 1.6094379124341, 2.39789527279837, 2.63905732961526, 2.484906649788, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.6094379124341, 2.63905732961526, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 2.19722457733622, 1.94591014905531, 1.6094379124341, 2.56494935746154, 0.693147180559945, 2.30258509299405, 
0.693147180559945, 2.56494935746154, 2.56494935746154, 2.30258509299405, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.77258872223978, 0, 0.693147180559945, 2.07944154167984, 2.30258509299405, 2.07944154167984, 1.09861228866811, 2.30258509299405, 2.77258872223978, 2.07944154167984, 1.79175946922805, 1.38629436111989, 1.94591014905531, 2.89037175789616, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.6094379124341, 2.70805020110221, 1.38629436111989, 2.39789527279837, 2.30258509299405, 1.79175946922805, 1.6094379124341, 2.07944154167984, 2.77258872223978, 1.79175946922805, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.56494935746154, 2.89037175789616, 1.09861228866811, 2.63905732961526, 2.56494935746154, 1.09861228866811, 2.30258509299405, 1.94591014905531, 2.39789527279837, 2.07944154167984, 1.79175946922805, 2.63905732961526, 3.29583686600433, 1.6094379124341, 2.19722457733622, 2.30258509299405, 1.38629436111989, 1.38629436111989, 2.484906649788, 2.07944154167984, 1.79175946922805, 1.09861228866811, 2.07944154167984, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.83321334405622, 1.09861228866811, 1.6094379124341, 2.07944154167984, 2.39789527279837, 2.30258509299405, 0, 1.94591014905531, 0.693147180559945, 2.484906649788, 2.39789527279837, 0, 0, 2.07944154167984, 1.09861228866811, 2.07944154167984, 2.63905732961526, 2.484906649788, 2.07944154167984, 1.38629436111989, 2.30258509299405, 2.07944154167984, 2.30258509299405, 3.09104245335832, 2.63905732961526, 1.09861228866811, 0, 2.94443897916644, 1.79175946922805, 1.94591014905531, 2.30258509299405, 2.30258509299405, 2.07944154167984, 2.39789527279837, 2.484906649788, 1.6094379124341, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0.693147180559945, 1.38629436111989, 2.19722457733622, 0, 2.19722457733622, 2.484906649788, 0.693147180559945, 1.79175946922805, 0.693147180559945, 2.30258509299405, 
1.38629436111989, 1.94591014905531, 0, 0, 1.79175946922805, 0.693147180559945, 1.79175946922805, 0, 1.94591014905531, 1.6094379124341, 2.56494935746154, 2.07944154167984, 2.07944154167984, 0, 1.94591014905531, 2.39789527279837, 1.38629436111989, 1.38629436111989, 2.39789527279837, 2.39789527279837, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 2.07944154167984, 1.38629436111989, 2.77258872223978, 1.38629436111989, 2.39789527279837, 2.70805020110221, 1.38629436111989, 1.94591014905531, 1.94591014905531, 1.6094379124341, 2.19722457733622, 2.19722457733622, 2.07944154167984, 1.38629436111989, 0.693147180559945, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.70805020110221, 2.39789527279837, 2.63905732961526, 2.19722457733622, 1.09861228866811, 1.6094379124341, 1.94591014905531, 1.09861228866811, 2.19722457733622, 2.63905732961526, 2.56494935746154, 2.39789527279837, 0.693147180559945, 2.07944154167984, 2.19722457733622, 1.94591014905531, 2.07944154167984, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 2.63905732961526, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.94591014905531, 2.70805020110221, 0, 1.38629436111989, 2.77258872223978, 1.79175946922805, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.09861228866811, 1.79175946922805, 1.09861228866811, 2.30258509299405, 2.89037175789616, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.09861228866811, 2.39789527279837, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.09861228866811, 2.70805020110221, 1.94591014905531, 2.07944154167984, 1.6094379124341, 0, 1.79175946922805, 2.19722457733622, 1.94591014905531, 0.693147180559945, 0, 2.30258509299405, 2.30258509299405, 2.07944154167984, 1.79175946922805, 2.70805020110221, 1.38629436111989, 2.63905732961526, 1.38629436111989, 
1.38629436111989, 1.38629436111989, 0.693147180559945, 2.19722457733622, 2.19722457733622, 0, 1.38629436111989, 1.79175946922805, 2.19722457733622, 0.693147180559945, 1.94591014905531, 1.79175946922805, 2.19722457733622, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.94591014905531, 0, 0, 2.89037175789616, 0, 0, 1.6094379124341, 3.09104245335832, 1.38629436111989, 2.30258509299405, 2.63905732961526, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0, 0, 1.09861228866811, 2.39789527279837, 0.693147180559945, 0, 1.94591014905531, 2.39789527279837, 1.38629436111989, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 2.70805020110221, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.6094379124341, 0, 2.77258872223978, 2.99573227355399, 0, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.94591014905531, 1.09861228866811, 1.79175946922805, 0.693147180559945, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.19722457733622, 0, 1.09861228866811, 1.6094379124341, 1.6094379124341, 0, 2.83321334405622, 2.39789527279837, 1.79175946922805, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.38629436111989, 2.63905732961526, 2.07944154167984, 0.693147180559945, 0, 1.94591014905531, 2.39789527279837, 1.38629436111989, 0, 1.94591014905531, 2.07944154167984, 2.07944154167984, 1.6094379124341, 2.19722457733622, 2.39789527279837, 1.79175946922805, 0, 1.79175946922805, 2.484906649788, 2.63905732961526, 2.19722457733622, 2.07944154167984, 2.07944154167984, 2.30258509299405, 2.77258872223978, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.09861228866811, 2.19722457733622, 2.56494935746154, 1.79175946922805, 
0.693147180559945, 1.94591014905531, 0, 0.693147180559945, 2.99573227355399, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.07944154167984, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 2.07944154167984, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 0, 1.38629436111989, 2.63905732961526, 2.07944154167984, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.6094379124341, 0, 2.30258509299405, 1.38629436111989, 0, 0, 0, 1.94591014905531, 1.6094379124341, 2.56494935746154, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.07944154167984, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 2.30258509299405, 1.94591014905531, 0, 2.484906649788, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.07944154167984, 2.19722457733622, 1.09861228866811, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.79175946922805, 3.09104245335832, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.94591014905531, 1.6094379124341, 2.63905732961526, 1.38629436111989, 2.484906649788, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.56494935746154, 1.94591014905531, 2.94443897916644, 2.39789527279837, 1.6094379124341, 1.38629436111989, 0.693147180559945, 2.83321334405622, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.79175946922805, 2.30258509299405, 1.79175946922805, 1.94591014905531, 2.07944154167984, 2.30258509299405, 1.94591014905531, 2.19722457733622, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 2.07944154167984, 
2.19722457733622, 2.39789527279837, 0.693147180559945, 2.70805020110221, 1.94591014905531, 2.484906649788, 2.30258509299405, 1.94591014905531, 2.30258509299405, 1.94591014905531, 2.484906649788, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 2.83321334405622, 2.19722457733622, 2.07944154167984, 1.6094379124341, 2.07944154167984, 2.19722457733622, 1.94591014905531, 2.19722457733622, 2.89037175789616, 2.07944154167984, 2.19722457733622, 2.77258872223978, 2.484906649788, 1.6094379124341, 2.07944154167984, 2.56494935746154, 1.38629436111989, 2.30258509299405, 1.94591014905531, 2.56494935746154, 1.6094379124341, 1.38629436111989, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.19722457733622, 2.19722457733622, 1.09861228866811, 2.19722457733622, 2.19722457733622, 1.09861228866811, 2.19722457733622, 2.39789527279837, 1.6094379124341, 2.07944154167984, 2.77258872223978, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.38629436111989, 2.70805020110221, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0.693147180559945, 2.70805020110221, 1.94591014905531, 1.79175946922805, 2.19722457733622, 2.83321334405622, 1.09861228866811, 1.79175946922805, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.56494935746154, 2.39789527279837, 2.19722457733622, 1.6094379124341, 2.30258509299405, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.6094379124341, 2.07944154167984, 1.79175946922805, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.6094379124341, 2.484906649788, 1.79175946922805, 0, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.30258509299405, 1.94591014905531, 0.693147180559945, 0.693147180559945, 2.39789527279837, 2.07944154167984, 1.79175946922805, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 2.19722457733622, 2.30258509299405, 1.79175946922805, 1.79175946922805, 1.6094379124341, 
3.17805383034795, 1.94591014905531, 0.693147180559945, 1.94591014905531, 2.19722457733622, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.19722457733622, 1.94591014905531, 2.07944154167984, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.94591014905531, 2.07944154167984, 1.09861228866811, 2.07944154167984, 2.77258872223978, 2.30258509299405, 2.19722457733622, 0.693147180559945, 2.30258509299405, 3.2188758248682, 1.6094379124341, 1.94591014905531, 1.94591014905531, 1.79175946922805, 2.63905732961526, 2.83321334405622, 0, 1.09861228866811, 2.39789527279837, 1.79175946922805, 2.30258509299405, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.94591014905531, 2.70805020110221, 2.07944154167984, 1.94591014905531, 2.70805020110221, 1.94591014905531, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.79175946922805, 2.19722457733622, 2.70805020110221, 0, 1.6094379124341, 2.39789527279837, 1.94591014905531, 1.38629436111989, 0.693147180559945, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.38629436111989, 2.19722457733622, 1.94591014905531, 1.6094379124341, 1.6094379124341, 1.6094379124341, 2.77258872223978, 2.19722457733622, 2.30258509299405, 1.79175946922805, 1.79175946922805, 1.79175946922805, 2.30258509299405, 1.6094379124341, 1.79175946922805, 2.19722457733622, 0, 1.09861228866811, 0, 2.63905732961526, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.484906649788, 1.79175946922805, 0.693147180559945, 1.38629436111989, 2.484906649788, 0, 1.94591014905531, 1.6094379124341, 1.09861228866811, 2.19722457733622, 0, 1.79175946922805, 2.56494935746154, 0.693147180559945, 1.79175946922805, 1.79175946922805, 2.19722457733622, 1.6094379124341, 1.79175946922805, 2.77258872223978, 0, 2.07944154167984, 2.484906649788, 1.09861228866811, 1.94591014905531, 1.94591014905531, 1.79175946922805, 1.94591014905531, 
1.09861228866811, 2.19722457733622, 2.484906649788, 0.693147180559945, 1.09861228866811, 2.77258872223978, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.09861228866811, 1.09861228866811, 2.39789527279837, 2.63905732961526, 1.38629436111989, 2.19722457733622, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.79175946922805, 2.89037175789616, 1.6094379124341, 3.04452243772342, 2.63905732961526, 1.94591014905531, 2.30258509299405, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.70805020110221, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.39789527279837, 1.09861228866811, 1.6094379124341, 1.94591014905531, 0, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 2.484906649788, 1.79175946922805, 2.70805020110221, 3.04452243772342, 2.484906649788, 3.04452243772342, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 0.693147180559945, 2.19722457733622, 2.07944154167984, 0.693147180559945, 1.79175946922805, 2.30258509299405, 1.79175946922805, 0, 1.79175946922805, 2.99573227355399, 1.38629436111989, 1.38629436111989, 1.94591014905531, 2.07944154167984, 2.63905732961526, 2.30258509299405, 2.19722457733622, 1.79175946922805, 2.19722457733622, 0.693147180559945, 2.19722457733622, 1.09861228866811, 1.79175946922805, 1.09861228866811, 2.07944154167984, 2.70805020110221, 1.38629436111989, 0, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 2.56494935746154, 1.6094379124341, 0, 1.94591014905531, 1.6094379124341, 0, 2.19722457733622, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.63905732961526, 0, 1.94591014905531, 1.79175946922805, 2.56494935746154, 1.94591014905531, 2.19722457733622, 1.79175946922805, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.09861228866811, 
0.693147180559945, 1.79175946922805, 2.07944154167984, 1.38629436111989, 1.38629436111989, 2.07944154167984, 3.17805383034795, 1.94591014905531, 1.6094379124341, 1.94591014905531, 1.79175946922805, 0.693147180559945, 2.484906649788, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 2.39789527279837, 0, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.94591014905531, 2.30258509299405, 2.19722457733622, 0, 1.79175946922805, 0, 2.07944154167984, 2.39789527279837, 1.09861228866811, 1.79175946922805, 1.6094379124341, 1.38629436111989, 2.30258509299405, 2.19722457733622, 0, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.6094379124341, 2.19722457733622, 2.39789527279837, 1.6094379124341, 2.39789527279837, 2.484906649788, 2.484906649788, 2.39789527279837, 1.79175946922805, 2.30258509299405, 2.39789527279837, 2.19722457733622, 1.6094379124341, 2.39789527279837, 2.39789527279837, 2.30258509299405, 1.09861228866811, 2.07944154167984, 0.693147180559945, 2.89037175789616, 1.94591014905531, 1.6094379124341, 1.38629436111989, 2.39789527279837, 1.09861228866811, 2.07944154167984, 2.63905732961526, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.56494935746154, 2.56494935746154, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 2.56494935746154, 1.09861228866811, 2.30258509299405, 2.30258509299405, 2.484906649788, 2.70805020110221, 1.6094379124341, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 2.30258509299405, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.39789527279837, 0, 2.70805020110221, 1.09861228866811, 0.693147180559945, 
1.6094379124341, 1.38629436111989, 2.30258509299405, 1.6094379124341, 1.38629436111989, 1.6094379124341, 2.39789527279837, 1.79175946922805, 1.38629436111989, 0, 1.38629436111989, 1.94591014905531, 2.39789527279837, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0.693147180559945, 2.07944154167984, 2.19722457733622, 2.63905732961526, 2.07944154167984, 1.94591014905531, 0.693147180559945, 2.07944154167984, 2.07944154167984, 2.484906649788, 1.79175946922805, 2.07944154167984, 1.94591014905531, 2.19722457733622, 1.09861228866811, 0, 1.6094379124341, 1.6094379124341, 0, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.6094379124341, 2.07944154167984, 2.19722457733622, 0.693147180559945, 1.09861228866811, 2.63905732961526, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.38629436111989, 2.94443897916644, 2.07944154167984, 1.09861228866811, 2.94443897916644, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.6094379124341, 2.30258509299405, 1.09861228866811, 0, 1.94591014905531, 2.30258509299405, 1.6094379124341, 2.30258509299405, 1.94591014905531, 1.09861228866811, 2.83321334405622, 1.09861228866811, 0, 2.39789527279837, 2.30258509299405, 1.6094379124341, 1.79175946922805, 1.94591014905531, 2.19722457733622, 2.89037175789616, 1.94591014905531, 2.19722457733622, 2.19722457733622, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.30258509299405, 1.6094379124341, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.79175946922805, 2.56494935746154, 2.30258509299405, 1.6094379124341, 0.693147180559945, 0, 2.70805020110221, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.30258509299405, 2.07944154167984, 2.30258509299405, 0, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 2.19722457733622, 2.63905732961526, 
1.94591014905531, 2.30258509299405, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 2.39789527279837, 1.6094379124341, 1.94591014905531, 1.09861228866811, 2.30258509299405, 1.38629436111989, 2.07944154167984, 1.79175946922805, 1.6094379124341, 2.77258872223978, 2.19722457733622, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.07944154167984, 0.693147180559945, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.63905732961526, 1.38629436111989, 2.30258509299405, 2.07944154167984, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 2.07944154167984, 1.79175946922805, 2.30258509299405, 2.56494935746154, 1.79175946922805, 0, 0, 0.693147180559945, 2.19722457733622, 2.19722457733622, 2.484906649788, 0.693147180559945, 1.94591014905531, 1.38629436111989, 2.56494935746154, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 0, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 2.30258509299405, 2.07944154167984, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.38629436111989, 1.94591014905531, 1.6094379124341, 1.79175946922805, 2.70805020110221, 0.693147180559945, 2.99573227355399, 2.30258509299405, 1.94591014905531, 2.56494935746154, 1.79175946922805, 1.38629436111989, 1.6094379124341, 2.56494935746154, 2.39789527279837, 2.56494935746154, 1.6094379124341, 2.484906649788, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.79175946922805, 2.56494935746154, 1.6094379124341, 1.6094379124341, 2.19722457733622, 2.484906649788, 1.79175946922805, 1.79175946922805, 1.09861228866811, 2.19722457733622, 0.693147180559945, 2.484906649788, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.83321334405622, 1.6094379124341, 1.79175946922805, 2.07944154167984, 2.19722457733622, 1.38629436111989, 
2.56494935746154, 1.09861228866811, 2.07944154167984, 2.484906649788, 1.79175946922805, 1.79175946922805, 1.94591014905531, 2.07944154167984, 1.94591014905531, 2.19722457733622, 1.6094379124341, 2.07944154167984, 1.6094379124341, 2.484906649788, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0, 1.94591014905531, 2.19722457733622, 0.693147180559945, 2.484906649788, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.79175946922805, 2.70805020110221, 0.693147180559945, 2.19722457733622, 2.07944154167984, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.6094379124341, 2.07944154167984, 2.30258509299405, 1.09861228866811, 1.38629436111989, 2.484906649788, 0, 2.63905732961526, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.38629436111989, 2.19722457733622, 3.17805383034795, 0.693147180559945, 2.484906649788, 1.6094379124341, 0.693147180559945, 0.693147180559945, 2.19722457733622, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.484906649788, 2.56494935746154, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.38629436111989, 2.484906649788, 1.6094379124341, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.484906649788, 1.38629436111989, 1.38629436111989, 2.30258509299405, 1.38629436111989, 2.39789527279837, 2.39789527279837, 2.19722457733622, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.94591014905531, 2.484906649788, 1.94591014905531, 3.04452243772342, 1.94591014905531, 2.07944154167984, 2.484906649788, 1.09861228866811, 1.6094379124341, 2.19722457733622, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.6094379124341, 2.70805020110221, 1.6094379124341, 2.94443897916644, 1.6094379124341, 
1.38629436111989, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.39789527279837, 1.38629436111989, 0, 2.07944154167984, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.94591014905531, 2.39789527279837, 2.63905732961526, 0.693147180559945, 1.38629436111989, 2.70805020110221, 2.484906649788, 1.94591014905531, 1.79175946922805, 0, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.79175946922805, 0.693147180559945, 2.19722457733622, 0, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.30258509299405, 1.38629436111989, 2.07944154167984, 0, 1.38629436111989, 1.38629436111989, 2.39789527279837, 2.30258509299405, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.6094379124341, 2.56494935746154, 1.79175946922805, 1.09861228866811, 2.484906649788, 2.07944154167984, 1.6094379124341, 1.94591014905531, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 2.30258509299405, 2.39789527279837, 2.19722457733622, 2.19722457733622, 1.94591014905531, 0.693147180559945, 2.07944154167984, 2.19722457733622, 2.77258872223978, 2.56494935746154, 2.56494935746154, 1.6094379124341, 2.19722457733622, 1.09861228866811, 2.77258872223978, 1.79175946922805, 2.30258509299405, 2.19722457733622, 2.07944154167984, 1.09861228866811, 1.6094379124341, 0, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.6094379124341, 2.56494935746154, 1.6094379124341, 1.6094379124341, 2.07944154167984, 2.94443897916644, 1.79175946922805, 2.39789527279837, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.38629436111989, 2.39789527279837, 1.38629436111989, 1.6094379124341, 2.30258509299405, 2.07944154167984, 2.484906649788, 1.94591014905531, 1.6094379124341, 2.30258509299405, 1.38629436111989, 1.94591014905531, 2.19722457733622, 2.89037175789616, 0.693147180559945, 1.6094379124341, 2.30258509299405, 2.39789527279837, 1.94591014905531, 
1.09861228866811, 0.693147180559945, 2.39789527279837, 1.94591014905531, 1.09861228866811, 2.56494935746154, 1.79175946922805, 2.89037175789616, 1.38629436111989, 2.30258509299405, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.19722457733622, 0.693147180559945, 1.6094379124341, 3.13549421592915, 2.30258509299405, 2.70805020110221, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 2.99573227355399, 1.6094379124341, 2.94443897916644, 2.39789527279837, 2.89037175789616, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.38629436111989, 1.38629436111989, 2.07944154167984, 1.38629436111989, 2.63905732961526, 1.94591014905531, 1.09861228866811, 3.09104245335832, 2.19722457733622, 2.39789527279837, 2.83321334405622, 1.38629436111989, 1.94591014905531, 1.6094379124341, 2.77258872223978, 1.09861228866811, 2.63905732961526, 1.6094379124341, 2.484906649788, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.38629436111989, 2.39789527279837, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 2.30258509299405, 1.38629436111989, 1.79175946922805, 1.94591014905531, 2.19722457733622, 1.79175946922805, 1.94591014905531, 2.30258509299405, 1.94591014905531, 2.19722457733622, 1.94591014905531, 1.38629436111989, 2.56494935746154, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.77258872223978, 1.6094379124341, 1.79175946922805, 1.94591014905531, 2.19722457733622, 2.19722457733622, 1.94591014905531, 2.77258872223978, 1.79175946922805, 2.07944154167984, 2.19722457733622, 1.6094379124341, 1.38629436111989, 1.38629436111989, 0, 1.38629436111989, 2.39789527279837, 2.39789527279837, 2.63905732961526, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.94591014905531, 
0, 2.39789527279837, 1.79175946922805, 0.693147180559945, 2.56494935746154, 2.19722457733622, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0, 1.79175946922805, 1.6094379124341, 1.94591014905531, 2.30258509299405, 1.79175946922805, 2.07944154167984, 1.09861228866811, 2.39789527279837, 2.07944154167984, 2.19722457733622, 1.94591014905531, 0, 1.38629436111989, 2.484906649788, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.38629436111989, 3.04452243772342, 1.6094379124341, 2.77258872223978, 1.38629436111989, 1.09861228866811, 2.07944154167984, 2.07944154167984, 2.07944154167984, 1.94591014905531, 2.07944154167984, 1.38629436111989, 2.19722457733622, 2.94443897916644, 1.79175946922805, 1.6094379124341, 2.07944154167984, 2.39789527279837, 2.07944154167984, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.38629436111989, 3.04452243772342, 1.38629436111989, 1.94591014905531, 2.63905732961526, 2.484906649788, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.39789527279837, 2.19722457733622, 1.6094379124341, 1.09861228866811, 2.39789527279837, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.63905732961526, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.6094379124341, 1.79175946922805, 2.30258509299405, 2.30258509299405, 1.6094379124341, 2.30258509299405, 2.19722457733622, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.30258509299405, 2.07944154167984, 1.38629436111989, 0, 2.07944154167984, 2.39789527279837, 2.56494935746154, 1.38629436111989, 1.6094379124341, 0, 1.94591014905531, 2.19722457733622, 2.07944154167984, 2.484906649788, 1.6094379124341, 3.29583686600433, 1.6094379124341, 1.79175946922805, 0, 0, 2.56494935746154, 2.484906649788, 2.39789527279837, 1.09861228866811, 2.63905732961526, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 
2.39789527279837, 1.79175946922805, 1.79175946922805, 1.09861228866811, 1.79175946922805, 2.83321334405622, 1.94591014905531, 1.79175946922805, 2.99573227355399, 2.70805020110221, 2.39789527279837, 2.30258509299405, 0, 1.38629436111989, 2.484906649788, 0.693147180559945, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.6094379124341, 1.09861228866811, 0, 1.79175946922805, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.38629436111989, 0, 2.30258509299405, 1.09861228866811, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.6094379124341, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.56494935746154, 1.94591014905531, 0, 0.693147180559945, 2.63905732961526, 0.693147180559945, 1.09861228866811, 2.30258509299405, 2.30258509299405, 1.09861228866811, 2.30258509299405, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.94591014905531, 1.79175946922805, 2.63905732961526, 1.79175946922805, 2.19722457733622, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.09861228866811, 2.07944154167984, 0, 1.09861228866811, 2.39789527279837, 1.6094379124341, 2.07944154167984, 2.30258509299405, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.19722457733622, 2.484906649788, 1.6094379124341, 1.94591014905531, 1.94591014905531, 1.6094379124341, 1.79175946922805, 3.2188758248682, 1.94591014905531, 2.30258509299405, 2.19722457733622, 2.56494935746154, 1.94591014905531, 1.94591014905531, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.79175946922805, 2.07944154167984, 1.6094379124341, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.09861228866811, 2.63905732961526, 1.6094379124341, 2.07944154167984, 2.77258872223978, 2.07944154167984, 
1.6094379124341, 1.79175946922805, 0.693147180559945, 0.693147180559945, 2.39789527279837, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.30258509299405, 2.484906649788, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 2.30258509299405, 1.94591014905531, 2.07944154167984, 2.19722457733622, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.63905732961526, 2.39789527279837, 0.693147180559945, 2.56494935746154, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.6094379124341, 2.484906649788, 2.30258509299405, 2.07944154167984, 2.39789527279837, 1.6094379124341, 1.94591014905531, 2.30258509299405, 2.07944154167984, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.19722457733622, 2.484906649788, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.484906649788, 1.6094379124341, 1.09861228866811, 1.94591014905531, 0, 2.07944154167984, 2.07944154167984, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.94591014905531, 1.09861228866811, 2.99573227355399, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 2.19722457733622, 2.56494935746154, 1.38629436111989, 2.484906649788, 1.79175946922805, 1.09861228866811, 2.39789527279837, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.38629436111989, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.79175946922805, 2.99573227355399, 
1.09861228866811, 2.19722457733622, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.39789527279837, 1.6094379124341, 0.693147180559945, 1.38629436111989, 2.94443897916644, 1.6094379124341, 1.94591014905531, 2.77258872223978, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.79175946922805, 0, 1.6094379124341, 2.07944154167984, 1.38629436111989, 0, 0, 1.94591014905531, 0, 1.79175946922805, 2.63905732961526, 2.56494935746154, 2.89037175789616, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.07944154167984, 2.07944154167984, 1.38629436111989, 2.30258509299405, 2.30258509299405, 2.19722457733622, 2.99573227355399, 1.6094379124341, 2.63905732961526, 1.6094379124341, 2.30258509299405, 0.693147180559945, 1.09861228866811, 2.77258872223978, 2.30258509299405, 2.39789527279837, 2.63905732961526, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 2.39789527279837, 2.484906649788, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.09861228866811, 2.56494935746154, 1.79175946922805, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.79175946922805, 1.79175946922805, 1.94591014905531, 2.39789527279837, 1.6094379124341, 1.94591014905531, 1.38629436111989, 2.07944154167984, 2.07944154167984, 1.94591014905531, 2.77258872223978, 2.484906649788, 2.89037175789616, 2.07944154167984, 2.39789527279837, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.38629436111989, 2.19722457733622, 1.94591014905531, 1.38629436111989, 2.30258509299405, 2.07944154167984, 2.83321334405622, 2.83321334405622, 2.19722457733622, 1.94591014905531, 1.09861228866811, 1.94591014905531, 1.6094379124341, 2.484906649788, 1.6094379124341, 2.484906649788, 1.79175946922805, 2.30258509299405, 
2.70805020110221, 1.79175946922805, 2.07944154167984, 1.94591014905531, 1.94591014905531, 2.07944154167984, 1.6094379124341, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.56494935746154, 2.07944154167984, 1.94591014905531, 2.19722457733622, 2.19722457733622, 0.693147180559945, 2.19722457733622, 2.07944154167984, 2.94443897916644, 1.79175946922805, 0, 1.94591014905531, 2.70805020110221, 1.79175946922805, 2.19722457733622, 2.484906649788, 2.63905732961526, 1.6094379124341, 2.39789527279837, 2.19722457733622, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.79175946922805, 2.39789527279837, 0, 1.09861228866811, 1.79175946922805, 2.484906649788, 2.39789527279837, 2.07944154167984, 1.09861228866811, 2.07944154167984, 0.693147180559945, 2.07944154167984, 1.94591014905531, 1.6094379124341, 0, 1.38629436111989, 1.38629436111989, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.79175946922805, 1.09861228866811, 2.30258509299405, 2.07944154167984, 1.38629436111989, 1.94591014905531, 2.99573227355399, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.19722457733622, 1.09861228866811, 2.30258509299405, 1.38629436111989, 2.39789527279837, 2.19722457733622, 1.79175946922805, 2.30258509299405, 1.09861228866811, 2.30258509299405, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.94591014905531, 2.70805020110221, 2.30258509299405, 1.38629436111989, 1.09861228866811, 1.79175946922805, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.09861228866811, 2.99573227355399, 2.07944154167984, 1.94591014905531, 0, 1.79175946922805, 1.6094379124341, 2.70805020110221, 0.693147180559945, 1.09861228866811, 2.89037175789616, 2.39789527279837, 1.94591014905531, 2.07944154167984, 2.30258509299405, 0, 2.19722457733622, 2.30258509299405, 1.09861228866811, 1.94591014905531, 2.30258509299405, 1.79175946922805, 3.09104245335832, 
1.79175946922805, 2.19722457733622, 1.94591014905531, 2.30258509299405, 2.07944154167984, 2.07944154167984, 0, 2.63905732961526, 2.07944154167984, 2.30258509299405, 1.79175946922805, 2.19722457733622, 1.09861228866811, 0, 2.63905732961526, 2.39789527279837, 0.693147180559945, 0, 2.07944154167984, 1.6094379124341, 1.6094379124341, 2.56494935746154, 1.79175946922805, 2.30258509299405, 2.07944154167984, 1.79175946922805, 0, 1.79175946922805, 2.63905732961526, 2.89037175789616, 1.6094379124341, 1.38629436111989, 1.09861228866811, 2.39789527279837, 2.484906649788, 1.38629436111989, 1.38629436111989, 2.39789527279837, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0.693147180559945, 1.6094379124341, 1.09861228866811, 2.56494935746154, 1.38629436111989, 1.6094379124341, 2.77258872223978, 1.94591014905531, 2.39789527279837, 1.94591014905531, 1.6094379124341, 2.484906649788, 2.39789527279837, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.09861228866811, 2.39789527279837, 2.77258872223978, 2.89037175789616, 0, 1.09861228866811, 1.79175946922805, 2.19722457733622, 2.07944154167984, 2.30258509299405, 1.09861228866811, 2.07944154167984, 2.56494935746154, 0.693147180559945, 2.19722457733622, 1.6094379124341, 2.83321334405622, 2.19722457733622, 1.94591014905531, 2.07944154167984, 1.09861228866811, 2.30258509299405, 2.39789527279837, 0.693147180559945, 2.07944154167984, 1.09861228866811, 2.89037175789616, 1.79175946922805, 2.77258872223978, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.09861228866811, 2.39789527279837, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.94591014905531, 
2.63905732961526, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.79175946922805, 1.94591014905531, 2.19722457733622, 1.09861228866811, 2.484906649788, 1.79175946922805, 1.38629436111989, 1.79175946922805, 1.94591014905531, 2.19722457733622, 2.70805020110221, 2.484906649788, 0.693147180559945, 2.19722457733622, 1.94591014905531, 1.09861228866811, 2.07944154167984, 1.38629436111989, 1.94591014905531, 2.39789527279837, 0.693147180559945, 0, 0, 2.30258509299405, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.09861228866811, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.38629436111989, 1.94591014905531, 1.38629436111989, 2.07944154167984, 2.56494935746154, 2.30258509299405, 1.79175946922805, 2.77258872223978, 2.07944154167984, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.94591014905531, 2.39789527279837, 2.56494935746154, 1.09861228866811, 1.6094379124341, 2.07944154167984, 2.484906649788, 1.94591014905531, 1.38629436111989, 2.30258509299405, 1.94591014905531, 0.693147180559945, 1.6094379124341, 2.99573227355399, 2.07944154167984, 2.39789527279837, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.79175946922805, 1.94591014905531, 2.484906649788, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.79175946922805, 2.07944154167984, 2.07944154167984, 2.07944154167984, 1.79175946922805, 1.38629436111989, 2.99573227355399, 2.77258872223978, 2.83321334405622, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.30258509299405, 1.79175946922805, 2.30258509299405, 2.39789527279837, 1.09861228866811, 2.30258509299405, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 
1.09861228866811, 1.6094379124341, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 1.6094379124341, 2.39789527279837, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.6094379124341, 2.77258872223978, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0.693147180559945, 2.19722457733622, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 2.19722457733622, 1.38629436111989, 1.09861228866811, 0, 0, 2.30258509299405, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 2.30258509299405, 0.693147180559945, 0, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.63905732961526, 1.38629436111989, 1.79175946922805, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 2.56494935746154, 0, 0, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 2.39789527279837, 2.19722457733622, 0.693147180559945, 1.94591014905531, 1.38629436111989, 1.09861228866811, 2.30258509299405, 2.07944154167984, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.94591014905531, 0.693147180559945, 2.99573227355399, 2.70805020110221, 1.79175946922805, 0.693147180559945, 0.693147180559945, 
2.484906649788, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 0, 0, 1.6094379124341, 2.30258509299405, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 0, 2.07944154167984, 1.09861228866811, 1.09861228866811, 2.39789527279837, 0.693147180559945, 0, 1.6094379124341, 1.6094379124341, 1.94591014905531, 1.09861228866811, 0, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.38629436111989, 2.30258509299405, 2.39789527279837, 1.6094379124341, 0, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0, 0, 2.63905732961526, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0, 1.38629436111989, 0, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.79175946922805, 2.484906649788, 1.09861228866811, 2.07944154167984, 1.38629436111989, 0, 1.38629436111989, 1.94591014905531, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 2.07944154167984, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 3.17805383034795, 2.19722457733622, 2.56494935746154, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0, 2.19722457733622, 1.09861228866811, 1.79175946922805, 2.19722457733622, 
1.09861228866811, 0, 1.38629436111989, 1.94591014905531, 1.38629436111989, 0, 2.07944154167984, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.30258509299405, 1.79175946922805, 0, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 0, 1.6094379124341, 1.94591014905531, 2.07944154167984, 0.693147180559945, 1.79175946922805, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.6094379124341, 1.09861228866811, 2.07944154167984, 1.6094379124341, 0, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.09861228866811, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.19722457733622, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 2.19722457733622, 0, 1.94591014905531, 0, 2.77258872223978, 1.38629436111989, 1.6094379124341, 0, 0, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.79175946922805, 2.39789527279837, 1.79175946922805, 2.30258509299405, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 2.07944154167984, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0, 1.09861228866811, 2.19722457733622, 0, 1.94591014905531, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0.693147180559945, 
1.38629436111989, 1.79175946922805, 1.79175946922805, 0, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.09861228866811, 2.39789527279837, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0.693147180559945, 2.30258509299405, 1.79175946922805, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.6094379124341, 2.07944154167984, 0.693147180559945, 1.38629436111989, 2.39789527279837, 1.94591014905531, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.94591014905531, 2.63905732961526, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.79175946922805, 2.30258509299405, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 1.38629436111989, 2.19722457733622, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.30258509299405, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.94591014905531, 0.693147180559945, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 2.63905732961526, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0, 1.09861228866811, 1.79175946922805, 2.484906649788, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 0.693147180559945, 0, 2.07944154167984, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.6094379124341, 
0.693147180559945, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 2.39789527279837, 0, 1.79175946922805, 1.38629436111989, 2.07944154167984, 2.07944154167984, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0, 0, 2.19722457733622, 1.79175946922805, 1.38629436111989, 1.38629436111989, 0, 0, 1.79175946922805, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 2.39789527279837, 1.94591014905531, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 2.07944154167984, 0, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.56494935746154, 0.693147180559945, 1.38629436111989, 0, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 2.94443897916644, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.6094379124341, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.94591014905531, 0, 1.79175946922805, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 1.38629436111989, 2.19722457733622, 0, 0, 2.56494935746154, 
1.09861228866811, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 2.19722457733622, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.94591014905531, 1.09861228866811, 2.56494935746154, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 2.70805020110221, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0, 0, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.6094379124341, 2.07944154167984, 0, 0.693147180559945, 1.79175946922805, 0, 1.79175946922805, 1.09861228866811, 0, 0, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0, 1.6094379124341, 2.19722457733622, 1.79175946922805, 0, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.79175946922805, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0, 0, 0, 0, 2.07944154167984, 0.693147180559945, 0, 1.79175946922805, 2.30258509299405, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.38629436111989, 
0.693147180559945, 1.38629436111989, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.38629436111989, 0, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.79175946922805, 0, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.6094379124341, 0, 2.19722457733622, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 2.07944154167984, 0, 1.6094379124341, 0, 1.6094379124341, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.09861228866811, 2.19722457733622, 0, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.94591014905531, 1.6094379124341, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 2.07944154167984, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 2.07944154167984, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0, 0, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 0, 0, 1.94591014905531, 0, 1.09861228866811, 1.09861228866811, 2.39789527279837, 0.693147180559945, 2.63905732961526, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 
1.09861228866811, 1.94591014905531, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.19722457733622, 1.38629436111989, 1.94591014905531, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.38629436111989, 0, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.94591014905531, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 1.94591014905531, 0, 1.94591014905531, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.94591014905531, 2.39789527279837, 1.6094379124341, 2.19722457733622, 1.79175946922805, 2.39789527279837, 2.07944154167984, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.07944154167984, 2.07944154167984, 1.09861228866811, 0, 2.30258509299405, 0.693147180559945, 0.693147180559945, 2.19722457733622, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 1.94591014905531, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 2.39789527279837, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0, 2.484906649788, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.09861228866811, 2.30258509299405, 0.693147180559945, 0.693147180559945, 0, 
2.19722457733622, 1.38629436111989, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.94591014905531, 2.19722457733622, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 1.38629436111989, 1.79175946922805, 0, 2.484906649788, 1.6094379124341, 2.07944154167984, 0, 0, 2.19722457733622, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.94591014905531, 2.19722457733622, 1.94591014905531, 2.07944154167984, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0.693147180559945, 2.19722457733622, 1.94591014905531, 1.94591014905531, 0, 1.79175946922805, 0, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0.693147180559945, 2.07944154167984, 2.07944154167984, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.94591014905531, 1.6094379124341, 0, 1.09861228866811, 2.39789527279837, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0, 1.38629436111989, 1.09861228866811, 1.94591014905531, 0, 0, 0.693147180559945, 1.38629436111989, 1.94591014905531, 0, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 2.07944154167984, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 2.484906649788, 1.09861228866811, 0, 2.484906649788, 2.30258509299405, 0, 1.94591014905531, 0.693147180559945, 
1.09861228866811, 1.38629436111989, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.6094379124341, 2.56494935746154, 1.94591014905531, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 0, 1.94591014905531, 2.07944154167984, 2.07944154167984, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.38629436111989, 2.39789527279837, 0, 2.39789527279837, 1.38629436111989, 0, 2.19722457733622, 1.09861228866811, 0, 1.6094379124341, 2.30258509299405, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.19722457733622, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.6094379124341, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 2.39789527279837, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.07944154167984, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.09861228866811, 1.79175946922805, 1.94591014905531, 0, 0, 1.94591014905531, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 0, 1.6094379124341, 2.77258872223978, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 2.30258509299405, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.79175946922805, 0.693147180559945, 1.79175946922805, 0, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 
1.09861228866811, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 0, 0.693147180559945, 0, 1.79175946922805, 1.94591014905531, 2.484906649788, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 0, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0.693147180559945, 2.19722457733622, 1.09861228866811, 2.19722457733622, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 0, 0, 1.38629436111989, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.07944154167984, 2.30258509299405, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.94591014905531, 1.38629436111989, 2.19722457733622, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.94591014905531, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 2.19722457733622, 2.30258509299405, 0, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.09861228866811, 0.693147180559945, 2.07944154167984, 1.6094379124341, 
1.6094379124341, 0, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.09861228866811, 2.07944154167984, 2.07944154167984, 1.09861228866811, 0.693147180559945, 2.70805020110221, 0.693147180559945, 2.30258509299405, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.6094379124341, 0, 2.39789527279837, 1.09861228866811, 1.94591014905531, 1.09861228866811, 2.70805020110221, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.484906649788, 1.09861228866811, 0, 0.693147180559945, 2.19722457733622, 1.38629436111989, 1.94591014905531, 0, 0, 1.6094379124341, 1.6094379124341, 0, 0, 1.09861228866811, 0.693147180559945, 0, 2.30258509299405, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0, 2.07944154167984, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.79175946922805, 2.77258872223978, 1.38629436111989, 1.38629436111989, 1.6094379124341, 2.19722457733622, 1.79175946922805, 1.6094379124341, 0.693147180559945, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.38629436111989, 0, 0, 2.30258509299405, 0, 1.79175946922805, 1.6094379124341, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0, 
0.693147180559945, 1.38629436111989, 0.693147180559945, 0, 0, 0, 1.38629436111989, 2.07944154167984, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 2.19722457733622, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.07944154167984, 2.83321334405622, 1.6094379124341, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0, 1.79175946922805, 0.693147180559945, 0, 1.38629436111989, 1.94591014905531, 1.6094379124341, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 0, 2.30258509299405, 1.6094379124341, 1.38629436111989, 1.09861228866811, 2.19722457733622, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.07944154167984, 1.79175946922805, 2.19722457733622, 1.94591014905531, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.79175946922805, 2.39789527279837, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0, 0, 1.09861228866811, 0.693147180559945, 1.94591014905531, 2.63905732961526, 1.79175946922805, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.39789527279837, 0, 2.30258509299405, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0.693147180559945, 
1.94591014905531, 2.07944154167984, 0, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.30258509299405, 1.38629436111989, 0, 2.07944154167984, 1.38629436111989, 2.07944154167984, 1.6094379124341, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 1.38629436111989, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.38629436111989, 2.07944154167984, 0, 0, 2.07944154167984, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 2.07944154167984, 1.79175946922805, 1.38629436111989, 2.30258509299405, 0.693147180559945, 1.6094379124341, 2.07944154167984, 1.09861228866811, 2.07944154167984, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 2.07944154167984, 2.484906649788, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0, 1.79175946922805, 0, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.07944154167984, 1.79175946922805, 1.09861228866811, 1.09861228866811, 2.63905732961526, 1.6094379124341, 2.39789527279837, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.09861228866811, 
1.6094379124341, 1.09861228866811, 1.79175946922805, 0, 1.6094379124341, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 2.19722457733622, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0, 0, 1.6094379124341, 1.94591014905531, 0, 0, 0, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.38629436111989, 2.07944154167984, 0, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.63905732961526, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.09861228866811, 2.484906649788, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.94591014905531, 0.693147180559945, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.38629436111989, 3.58351893845611, 1.09861228866811, 1.09861228866811, 1.79175946922805, 0, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0, 0, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 2.39789527279837, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0, 2.484906649788, 0.693147180559945, 0, 1.38629436111989, 1.94591014905531, 0, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.94591014905531, 1.6094379124341, 0, 
0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 0, 0, 0, 1.38629436111989, 1.38629436111989, 0, 0, 0.693147180559945, 2.07944154167984, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.94591014905531, 2.07944154167984, 0.693147180559945, 0, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0, 0, 1.6094379124341, 1.6094379124341, 0, 0.693147180559945, 0, 2.484906649788, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0, 1.79175946922805, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 1.79175946922805, 0, 1.79175946922805, 1.79175946922805, 0, 1.94591014905531, 0, 1.38629436111989, 1.79175946922805, 0, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 0, 0.693147180559945, 0, 0, 1.79175946922805, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0, 0, 0.693147180559945, 1.38629436111989, 
0.693147180559945, 1.38629436111989, 0, 0, 0.693147180559945, 1.6094379124341, 1.79175946922805, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0, 0, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0, 0.693147180559945, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0, 0, 0, 2.39789527279837, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 1.6094379124341, 1.79175946922805, 2.07944154167984, 2.70805020110221, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 0, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0, 0.693147180559945, 2.94443897916644, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 0, 0, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0.693147180559945, 0, 0, 0, 0.693147180559945, 1.79175946922805, 0, 0, 
0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 0, 1.94591014905531, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 0, 0, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 2.30258509299405, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.19722457733622, 0, 2.19722457733622, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 0, 0.693147180559945, 0, 2.19722457733622, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 2.99573227355399, 0, 0, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.6094379124341, 2.07944154167984, 0, 0.693147180559945, 0, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 0, 1.09861228866811, 0, 1.6094379124341, 1.79175946922805, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 1.38629436111989, 
1.38629436111989, 0, 1.6094379124341, 0, 1.38629436111989, 0.693147180559945, 0, 1.79175946922805, 0, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 0, 0, 1.09861228866811, 1.6094379124341, 0, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 0, 0, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 2.30258509299405, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0, 1.6094379124341, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0, 1.09861228866811, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.79175946922805, 0, 0, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 0, 0, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 2.77258872223978, 1.6094379124341, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0, 0, 0, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 
0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 1.6094379124341, 0, 0, 0.693147180559945, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0, 0, 1.09861228866811, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 0, 0, 2.07944154167984, 0, 0, 1.6094379124341, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0, 0, 2.07944154167984, 0, 0, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 1.79175946922805, 0, 2.30258509299405, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 1.09861228866811, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0, 1.6094379124341, 0, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 1.6094379124341, 0, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 2.19722457733622, 1.79175946922805, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 2.07944154167984, 1.09861228866811, 1.38629436111989, 
0, 0, 0, 0, 1.09861228866811, 0, 2.07944154167984, 1.09861228866811, 0, 0, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0, 0, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0.693147180559945, 2.30258509299405, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0, 0, 1.09861228866811, 1.6094379124341, 0, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.94591014905531, 0, 1.94591014905531, 0.693147180559945, 0, 2.39789527279837, 2.77258872223978, 0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 0, 2.19722457733622, 1.09861228866811, 2.19722457733622, 0, 0, 1.6094379124341, 2.19722457733622, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 2.30258509299405, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 0, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0, 0, 0, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 
2.63905732961526, 0, 0, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 2.19722457733622, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 0, 1.94591014905531, 0.693147180559945, 0, 0, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0, 2.19722457733622, 1.38629436111989, 2.30258509299405, 0, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 0, 1.38629436111989, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 0, 0, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0, 0, 0, 0, 0, 1.09861228866811, 0, 0, 1.6094379124341, 0, 1.79175946922805, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0, 0, 0.693147180559945, 
0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 1.38629436111989, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 0, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 0, 2.19722457733622, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 2.07944154167984, 1.09861228866811, 0, 0, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.79175946922805, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 2.07944154167984, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 0, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.09861228866811, 
1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.484906649788, 1.09861228866811, 1.09861228866811, 2.30258509299405, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.94591014905531, 1.79175946922805, 1.79175946922805, 0, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0, 1.09861228866811, 0, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 2.19722457733622, 0, 0, 0.693147180559945, 2.30258509299405, 1.09861228866811, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 1.79175946922805, 1.6094379124341, 0, 0.693147180559945, 1.38629436111989, 0, 2.07944154167984, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 0, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 2.19722457733622, 0.693147180559945, 1.38629436111989, 0, 0, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0, 0, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.09861228866811, 
0.693147180559945, 0, 0, 0, 1.79175946922805, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 2.83321334405622, 0, 0.693147180559945, 1.38629436111989, 0, 1.38629436111989, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 1.38629436111989, 0, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0, 1.94591014905531, 1.09861228866811, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 1.38629436111989, 2.07944154167984, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.38629436111989, 0, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 2.19722457733622, 1.94591014905531, 0, 1.38629436111989, 1.79175946922805, 2.39789527279837, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 2.56494935746154, 1.94591014905531, 1.38629436111989, 0, 0.693147180559945, 
2.39789527279837, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 2.39789527279837, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.94591014905531, 2.07944154167984, 1.38629436111989, 0, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.94591014905531, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 2.63905732961526, 1.38629436111989, 0.693147180559945, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.79175946922805, 1.94591014905531, 0.693147180559945, 0, 1.94591014905531, 1.09861228866811, 2.484906649788, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 2.07944154167984, 2.39789527279837, 2.30258509299405, 1.09861228866811, 1.38629436111989, 0.693147180559945, 2.484906649788, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.6094379124341, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0, 1.79175946922805, 2.30258509299405, 2.07944154167984, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0, 0, 2.56494935746154, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.07944154167984, 2.07944154167984, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 
1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 2.39789527279837, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.38629436111989, 0, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 2.39789527279837, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.6094379124341, 1.6094379124341, 0, 1.79175946922805, 2.30258509299405, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.63905732961526, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.79175946922805, 0, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.94591014905531, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 2.89037175789616, 1.94591014905531, 1.79175946922805, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0.693147180559945, 2.39789527279837, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0, 2.07944154167984, 1.09861228866811, 2.07944154167984, 2.07944154167984, 0.693147180559945, 2.07944154167984, 
1.79175946922805, 3.09104245335832, 2.19722457733622, 0.693147180559945, 0.693147180559945, 2.30258509299405, 1.38629436111989, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0, 1.38629436111989, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.79175946922805, 2.63905732961526, 0.693147180559945, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.09861228866811, 2.19722457733622, 1.38629436111989, 0, 1.79175946922805, 1.79175946922805, 1.6094379124341, 0, 1.38629436111989, 2.30258509299405, 1.09861228866811, 1.09861228866811, 2.30258509299405, 0, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.39789527279837, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.6094379124341, 2.07944154167984, 1.09861228866811, 2.19722457733622, 2.30258509299405, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0, 1.6094379124341, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 
1.09861228866811, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.6094379124341, 2.07944154167984, 1.79175946922805, 1.6094379124341, 1.79175946922805, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.79175946922805, 1.09861228866811, 0, 1.6094379124341, 0, 2.07944154167984, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.30258509299405, 0, 1.79175946922805, 2.07944154167984, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.09861228866811, 2.19722457733622, 0, 0, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.94591014905531, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.07944154167984, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.94591014905531, 0.693147180559945, 1.6094379124341, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.6094379124341, 2.07944154167984, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.94591014905531, 
1.6094379124341, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 2.70805020110221, 0, 0.693147180559945, 1.09861228866811, 0, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.79175946922805, 2.19722457733622, 1.79175946922805, 2.39789527279837, 2.30258509299405, 2.30258509299405, 1.09861228866811, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.94591014905531, 2.30258509299405, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 2.39789527279837, 1.09861228866811, 2.19722457733622, 0.693147180559945, 0.693147180559945, 0, 2.30258509299405, 0, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.39789527279837, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.6094379124341, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.79175946922805, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0, 2.39789527279837, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 1.38629436111989, 1.94591014905531, 0.693147180559945, 0, 2.39789527279837, 1.38629436111989, 0, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 
1.38629436111989, 1.38629436111989, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.94591014905531, 0.693147180559945, 0, 2.484906649788, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.6094379124341, 1.38629436111989, 2.07944154167984, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 0, 1.79175946922805, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 2.39789527279837, 1.09861228866811, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.6094379124341, 2.39789527279837, 0, 1.09861228866811, 2.30258509299405, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.30258509299405, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.79175946922805, 1.94591014905531, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.07944154167984, 1.38629436111989, 0.693147180559945, 0.693147180559945, 2.07944154167984, 2.07944154167984, 0.693147180559945, 2.39789527279837, 0, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.6094379124341, 0.693147180559945, 2.07944154167984, 0.693147180559945, 1.38629436111989, 
1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 2.484906649788, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 0, 2.07944154167984, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 0, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.94591014905531, 0, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 1.79175946922805, 1.38629436111989, 1.94591014905531, 0, 0.693147180559945, 1.6094379124341, 2.484906649788, 1.6094379124341, 1.09861228866811, 1.09861228866811, 2.30258509299405, 0, 1.09861228866811, 1.38629436111989, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 2.07944154167984, 2.19722457733622, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 1.38629436111989, 0, 1.6094379124341, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 
1.09861228866811, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0, 1.79175946922805, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.39789527279837, 2.56494935746154, 0.693147180559945, 0, 0, 0, 1.79175946922805, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0, 1.79175946922805, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.6094379124341, 1.6094379124341, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 1.09861228866811, 0, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 2.39789527279837, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 0, 0, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 0, 2.30258509299405, 1.79175946922805, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 1.94591014905531, 0, 1.6094379124341, 0, 0.693147180559945, 
0, 1.38629436111989, 2.19722457733622, 1.6094379124341, 0, 1.6094379124341, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0, 0, 2.39789527279837, 0.693147180559945, 0, 0, 1.09861228866811, 0, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 2.19722457733622, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0.693147180559945, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.94591014905531, 1.38629436111989, 0, 0, 1.38629436111989, 1.38629436111989, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 2.63905732961526, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.6094379124341, 0, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 
0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 1.38629436111989, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 0, 0, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0, 0, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 2.07944154167984, 0, 0.693147180559945, 0, 0, 2.19722457733622, 0, 0, 0, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0, 2.484906649788, 1.38629436111989, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 0, 0, 1.79175946922805, 1.09861228866811, 0, 0, 0, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 2.39789527279837, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 0, 0, 0, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.38629436111989, 2.30258509299405, 0, 0, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.94591014905531, 0, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.19722457733622, 
0.693147180559945, 1.79175946922805, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0, 0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 1.6094379124341, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 2.30258509299405, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.38629436111989, 0, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 0, 0, 1.6094379124341, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 0, 0.693147180559945, 0, 1.09861228866811, 2.484906649788, 0, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.79175946922805, 0, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 
0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.30258509299405, 0, 1.09861228866811, 0.693147180559945, 0, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0, 2.07944154167984, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0, 0, 0, 0, 1.6094379124341, 0, 1.09861228866811, 2.07944154167984, 0, 0, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 1.38629436111989, 0, 1.79175946922805, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 0, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 0, 0, 0, 2.19722457733622, 0, 1.79175946922805, 1.38629436111989, 1.09861228866811, 0, 0, 0, 1.09861228866811, 2.39789527279837, 1.38629436111989, 
0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 1.38629436111989, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.6094379124341, 2.07944154167984, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 2.19722457733622, 0, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.79175946922805, 0.693147180559945, 2.30258509299405, 0.693147180559945, 1.6094379124341, 1.94591014905531, 0, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 1.09861228866811, 0, 0, 1.09861228866811, 2.63905732961526, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.6094379124341, 2.77258872223978, 1.38629436111989, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 0, 1.94591014905531, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0, 0, 0, 0.693147180559945, 1.6094379124341, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 1.94591014905531, 
1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0, 0, 0, 0.693147180559945, }; - var targets = new double[] { 1, 7, 7, 2, 3, 7, 7, 7, 2, 7, 7, 7, 7, 1, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 1, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 3, 7, 2, 7, 7, 7, 7, 7, 5, 3, 7, 7, 7, 7, 7, 7, 3, 7, 7, 6, 7, 7, 2, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 2, 2, 7, 7, 17, 7, 2, 7, 1, 7, 7, 7, 7, 7, 7, 17, 7, 7, 7, 7, 7, 17, 7, 2, 7, 7, 1, 7, 1, 7, 7, 17, 7, 7, 1, 7, 7, 7, 7, 7, 7, 7, 2, 2, 2, 2, 1, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 0, 7, 1, 7, 1, 7, 7, 7, 7, 2, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 2, 2, 7, 2, 2, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 2, 2, 7, 2, 7, 1, 7, 2, 7, 7, 1, 7, 7, 7, 5, 2, 2, 7, 7, 2, 7, 7, 7, 7, 7, 17, 7, 2, 2, 1, 2, 7, 2, 7, 7, 2, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 2, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 2, 7, 7, 17, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 2, 7, 7, 2, 7, 14, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 1, 7, 7, 7, 7, 7, 7, 2, 7, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 7, 7, 3, 2, 2, 7, 7, 2, 2, 2, 2, 7, 7, 1, 2, 2, 2, 2, 2, 2, 17, 17, 2, 7, 7, 2, 1, 2, 2, 2, 3, 2, 2, 2, 2, 2, 3, 3, 2, 2, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 3, 1, 2, 3, 2, 2, 3, 2, 3, 3, 2, 2, 2, 2, 2, 2, 1, 3, 3, 1, 3, 3, 1, 1, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 3, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 1, 14, 2, 2, 2, 1, 3, 1, 2, 1, 2, 2, 2, 1, 1, 3, 2, 2, 1, 2, 1, 2, 7, 2, 2, 2, 1, 2, 4, 2, 2, 2, 3, 1, 3, 1, 3, 2, 3, 3, 2, 3, 2, 2, 2, 1, 2, 1, 2, 2, 3, 2, 2, 2, 1, 2, 2, 14, 2, 2, 2, 3, 3, 2, 1, 2, 3, 2, 2, 1, 3, 2, 2, 2, 2, 1, 1, 2, 2, 3, 2, 3, 3, 1, 1, 2, 2, 2, 1, 2, 3, 2, 3, 2, 2, 4, 14, 2, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 1, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 3, 3, 7, 3, 2, 2, 1, 1, 3, 2, 2, 2, 1, 2, 3, 2, 1, 2, 3, 2, 1, 2, 2, 1, 2, 2, 2, 2, 3, 1, 2, 3, 2, 3, 1, 2, 
15, 2, 2, 2, 13, 3, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 1, 1, 2, 3, 2, 3, 2, 2, 2, 3, 3, 2, 3, 3, 2, 2, 2, 3, 3, 2, 2, 3, 2, 2, 3, 17, 3, 2, 2, 3, 3, 2, 2, 3, 2, 2, 3, 1, 2, 3, 3, 7, 2, 2, 3, 2, 2, 2, 2, 2, 3, 3, 2, 2, 2, 2, 3, 3, 3, 1, 3, 2, 3, 1, 3, 2, 2, 3, 3, 2, 2, 3, 3, 3, 3, 3, 2, 3, 2, 3, 2, 3, 2, 1, 2, 2, 3, 2, 3, 1, 2, 3, 2, 2, 3, 2, 2, 1, 2, 3, 2, 2, 2, 2, 1, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 2, 1, 2, 1, 1, 13, 15, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 1, 2, 14, 1, 1, 14, 1, 16, 15, 2, 2, 3, 2, 2, 2, 2, 14, 2, 1, 2, 2, 2, 2, 14, 13, 5, 13, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 2, 13, 1, 14, 2, 1, 2, 1, 1, 2, 2, 1, 7, 2, 1, 1, 1, 1, 2, 2, 2, 3, 3, 1, 1, 2, 2, 2, 2, 14, 2, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 3, 2, 1, 2, 1, 14, 16, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 3, 2, 1, 2, 2, 14, 13, 3, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 2, 1, 2, 2, 2, 13, 14, 3, 2, 1, 3, 2, 2, 2, 6, 2, 2, 2, 1, 1, 14, 3, 13, 17, 1, 14, 2, 2, 1, 2, 3, 2, 3, 2, 2, 1, 1, 2, 2, 1, 2, 15, 3, 14, 1, 1, 2, 1, 1, 1, 1, 2, 2, 14, 1, 14, 1, 1, 2, 2, 1, 2, 2, 1, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 1, 1, 1, 16, 2, 2, 1, 2, 1, 14, 16, 14, 14, 14, 3, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 1, 1, 3, 14, 13, 2, 14, 14, 1, 3, 1, 2, 14, 2, 1, 1, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 2, 1, 2, 2, 14, 2, 2, 1, 2, 2, 3, 3, 2, 2, 2, 1, 2, 2, 2, 2, 2, 3, 2, 2, 1, 2, 2, 2, 3, 1, 4, 2, 2, 2, 2, 1, 2, 1, 3, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 13, 1, 1, 2, 1, 13, 1, 2, 2, 2, 2, 2, 16, 1, 2, 1, 2, 1, 1, 2, 2, 3, 2, 1, 2, 1, 2, 1, 2, 1, 1, 2, 3, 2, 1, 2, 2, 1, 2, 2, 3, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 5, 2, 1, 1, 2, 3, 2, 2, 2, 2, 3, 3, 2, 2, 2, 1, 1, 3, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 1, 2, 1, 1, 1, 1, 2, 1, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 1, 2, 1, 1, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 14, 2, 2, 2, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 
2, 2, 2, 2, 3, 1, 2, 2, 2, 1, 3, 2, 3, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 2, 2, 17, 2, 2, 2, 3, 2, 1, 2, 2, 2, 2, 1, 2, 1, 1, 1, 2, 2, 3, 1, 1, 2, 2, 2, 2, 2, 2, 2, 14, 3, 1, 13, 2, 2, 1, 2, 1, 1, 2, 2, 1, 1, 6, 2, 1, 1, 2, 3, 1, 2, 7, 2, 2, 2, 1, 1, 1, 2, 2, 13, 2, 1, 2, 14, 1, 1, 1, 2, 3, 1, 2, 2, 2, 2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 1, 2, 1, 2, 6, 1, 2, 2, 2, 1, 1, 2, 1, 2, 5, 1, 2, 1, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 7, 2, 1, 1, 2, 2, 1, 2, 2, 13, 2, 2, 2, 1, 2, 3, 2, 2, 2, 2, 2, 1, 3, 3, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 17, 14, 14, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 1, 1, 1, 2, 2, 1, 2, 3, 1, 1, 2, 2, 2, 15, 2, 2, 1, 1, 16, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 14, 2, 1, 3, 1, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 3, 1, 1, 1, 1, 2, 2, 1, 3, 2, 3, 2, 2, 1, 2, 3, 14, 3, 17, 3, 2, 1, 1, 1, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 3, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 3, 1, 2, 3, 2, 3, 3, 2, 2, 13, 2, 1, 1, 1, 3, 3, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 3, 3, 2, 2, 1, 2, 2, 3, 1, 2, 3, 1, 2, 2, 3, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 3, 3, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 1, 2, 1, 2, 3, 3, 2, 2, 1, 1, 2, 2, 1, 2, 1, 2, 2, 2, 3, 3, 2, 1, 2, 2, 3, 2, 3, 3, 2, 1, 7, 2, 3, 2, 2, 2, 2, 3, 2, 3, 2, 2, 2, 3, 1, 2, 2, 2, 1, 3, 2, 2, 2, 2, 7, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 3, 2, 1, 2, 1, 2, 3, 2, 1, 2, 2, 2, 17, 2, 1, 1, 2, 1, 2, 2, 3, 2, 2, 3, 1, 1, 1, 2, 2, 2, 2, 1, 3, 3, 3, 2, 2, 2, 2, 1, 1, 3, 2, 2, 2, 2, 3, 2, 1, 7, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 2, 2, 3, 3, 3, 2, 1, 2, 2, 2, 3, 3, 2, 2, 3, 3, 2, 2, 2, 3, 3, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 3, 16, 1, 2, 1, 2, 2, 2, 2, 2, 3, 2, 2, 1, 3, 2, 2, 2, 2, 3, 2, 
2, 2, 2, 2, 3, 2, 1, 2, 1, 2, 1, 1, 1, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 2, 13, 2, 1, 1, 2, 1, 1, 1, 1, 2, 3, 13, 2, 2, 2, 2, 2, 3, 2, 2, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 1, 1, 2, 2, 14, 1, 3, 2, 14, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 13, 2, 3, 2, 2, 2, 2, 2, 2, 17, 1, 1, 3, 15, 1, 2, 2, 2, 2, 14, 2, 2, 2, 14, 2, 1, 2, 2, 2, 3, 1, 2, 2, 2, 2, 1, 2, 1, 13, 1, 2, 1, 2, 2, 16, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 1, 17, 2, 2, 2, 1, 1, 1, 2, 1, 3, 13, 14, 2, 3, 2, 3, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 17, 3, 2, 1, 3, 1, 3, 6, 3, 2, 4, 3, 2, 3, 2, 2, 14, 1, 2, 14, 1, 3, 2, 2, 1, 2, 2, 3, 1, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 1, 14, 2, 1, 2, 2, 3, 2, 3, 3, 3, 2, 3, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 17, 1, 2, 15, 2, 2, 1, 3, 2, 1, 2, 2, 3, 2, 2, 16, 2, 1, 1, 2, 2, 2, 3, 1, 1, 2, 3, 2, 2, 2, 2, 3, 1, 2, 2, 3, 2, 3, 16, 2, 2, 3, 3, 3, 1, 1, 2, 2, 3, 3, 2, 2, 2, 3, 2, 2, 3, 2, 15, 14, 3, 2, 2, 2, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 3, 3, 2, 2, 3, 7, 3, 2, 2, 2, 2, 3, 2, 2, 5, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 3, 3, 3, 3, 3, 4, 2, 2, 2, 2, 2, 3, 2, 3, 3, 2, 3, 2, 13, 2, 3, 3, 14, 3, 2, 3, 2, 13, 3, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 3, 2, 3, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 3, 2, 1, 3, 3, 2, 5, 2, 2, 3, 3, 2, 3, 1, 3, 3, 2, 3, 2, 2, 3, 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 2, 2, 2, 3, 2, 3, 3, 3, 3, 2, 2, 2, 3, 2, 2, 3, 2, 2, 3, 3, 3, 3, 3, 2, 3, 3, 3, 15, 2, 2, 2, 3, 3, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 2, 2, 3, 2, 3, 3, 7, 1, 2, 2, 2, 3, 3, 2, 3, 2, 2, 2, 2, 3, 2, 3, 2, 2, 13, 2, 3, 3, 2, 2, 2, 2, 2, 2, 3, 3, 3, 2, 2, 16, 2, 2, 2, 2, 2, 3, 2, 3, 3, 2, 2, 13, 2, 3, 2, 2, 2, 2, 2, 3, 3, 14, 2, 3, 2, 3, 3, 2, 3, 2, 2, 3, 13, 2, 2, 3, 2, 2, 2, 1, 2, 2, 3, 1, 14, 2, 2, 2, 2, 1, 2, 2, 1, 3, 2, 2, 2, 3, 3, 3, 2, 1, 2, 2, 14, 3, 3, 2, 3, 2, 2, 3, 2, 2, 16, 
2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 13, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 2, 15, 2, 2, 15, 14, 13, 2, 3, 2, 2, 3, 3, 2, 2, 16, 2, 2, 2, 2, 2, 2, 3, 2, 14, 2, 2, 14, 13, 2, 2, 2, 3, 2, 14, 2, 2, 2, 1, 2, 13, 2, 2, 2, 2, 2, 3, 13, 2, 2, 2, 2, 2, 2, 2, 2, 13, 13, 6, 2, 3, 2, 14, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 13, 2, 2, 16, 2, 13, 13, 1, 2, 2, 3, 2, 2, 3, 3, 2, 2, 2, 2, 1, 3, 2, 3, 3, 3, 6, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 3, 2, 2, 3, 2, 3, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 2, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 3, 2, 3, 2, 2, 2, 2, 3, 2, 2, 2, 3, 3, 17, 2, 3, 3, 3, 3, 2, 2, 2, 2, 3, 2, 2, 2, 2, 13, 3, 2, 2, 3, 2, 2, 3, 2, 14, 2, 2, 17, 17, 2, 2, 1, 2, 3, 1, 2, 1, 1, 15, 2, 2, 2, 3, 13, 2, 2, 13, 2, 2, 2, 3, 3, 2, 3, 2, 2, 13, 7, 3, 2, 3, 2, 14, 2, 3, 2, 2, 2, 13, 2, 3, 3, 2, 14, 14, 2, 2, 2, 3, 2, 2, 2, 2, 13, 2, 2, 2, 13, 2, 2, 2, 2, 2, 3, 2, 13, 2, 2, 2, 2, 2, 2, 14, 3, 2, 2, 2, 2, 17, 1, 3, 2, 2, 2, 13, 2, 3, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 2, 3, 2, 13, 2, 2, 2, 3, 3, 3, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 16, 2, 2, 4, 2, 2, 16, 16, 2, 2, 3, 2, 2, 2, 3, 13, 2, 2, 3, 2, 2, 2, 1, 2, 2, 3, 2, 3, 3, 2, 2, 2, 2, 2, 3, 3, 2, 4, 2, 2, 4, 3, 2, 2, 3, 2, 3, 2, 3, 3, 3, 2, 2, 3, 14, 2, 2, 2, 4, 2, 3, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 15, 3, 14, 2, 2, 2, 3, 2, 2, 1, 3, 3, 2, 3, 2, 2, 3, 3, 14, 3, 2, 13, 2, 2, 2, 3, 2, 3, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 3, 2, 2, 13, 3, 2, 2, 3, 2, 3, 2, 2, 3, 3, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 13, 2, 13, 2, 1, 1, 14, 3, 2, 2, 2, 1, 13, 2, 2, 13, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 14, 2, 2, 13, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 14, 2, 2, 13, 2, 2, 2, 3, 2, 2, 2, 2, 3, 1, 1, 2, 3, 2, 2, 2, 2, 3, 3, 2, 13, 2, 14, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 1, 2, 2, 2, 3, 2, 2, 1, 1, 1, 3, 2, 3, 2, 2, 3, 2, 3, 2, 3, 2, 2, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 
2, 2, 2, 2, 3, 2, 3, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 2, 1, 2, 1, 3, 2, 2, 1, 2, 2, 2, 2, 13, 13, 2, 2, 1, 1, 2, 1, 3, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 1, 2, 2, 3, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 1, 2, 1, 2, 2, 2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 2, 3, 2, 3, 3, 2, 1, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 3, 1, 1, 1, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 2, 3, 2, 3, 2, 2, 14, 15, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 2, 1, 17, 2, 2, 1, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 14, 2, 2, 2, 13, 2, 3, 2, 2, 1, 2, 13, 1, 2, 1, 2, 2, 1, 2, 2, 13, 2, 3, 3, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 14, 2, 17, 13, 1, 3, 2, 3, 2, 1, 3, 3, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 14, 13, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 1, 2, 3, 2, 16, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 3, 2, 2, 1, 2, 2, 2, 2, 2, 3, 1, 13, 1, 2, 1, 2, 2, 13, 1, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 3, 14, 3, 2, 1, 2, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2, 2, 2, 1, 2, 3, 1, 2, 3, 2, 1, 1, 2, 13, 2, 2, 2, 2, 3, 3, 2, 2, 1, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 3, 16, 2, 2, 1, 2, 2, 2, 2, 3, 1, 3, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 14, 2, 2, 2, 2, 1, 2, 2, 1, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 1, 2, 2, 2, 1, 2, 1, 2, 2, 2, 1, 2, 3, 2, 1, 2, 2, 1, 2, 2, 3, 2, 
2, 2, 1, 2, 2, 2, 2, 2, 3, 1, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 1, 1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 13, 2, 2, 2, 2, 2, 1, 14, 2, 2, 2, 13, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 13, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 13, 2, 2, 2, 14, 1, 2, 2, 2, 1, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 13, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 3, 3, 2, 3, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 3, 2, 1, 1, 2, 2, 1, 2, 1, 1, 3, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 14, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 1, 2, 1, 2, 2, 2, 13, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 3, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 14, 1, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 1, 1, 13, 1, 1, 1, 2, 1, 1, 1, 2, 1, 2, 2, 2, 1, 1, 2, 1, 1, 13, 7, 2, 1, 1, 2, 1, 1, 1, 2, 2, 7, 3, 1, 1, 1, 3, 2, 2, 2, 3, 2, 2, 2, 1, 2, 2, 2, 2, 13, 3, 7, 2, 7, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 3, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 2, 14, 5, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 4, 2, 2, 1, 2, 1, 2, 2, 2, 17, 1, 1, 1, 1, 1, 2, 3, 2, 2, 1, 1, 2, 2, 1, 1, 1, 2, 1, 1, 1, 6, 1, 2, 3, 2, 2, 2, 3, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 7, 17, 1, 3, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1, 1, 1, 1, 2, 14, 2, 2, 1, 7, 14, 1, 2, 1, 1, 3, 2, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 3, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 13, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 1, 7, 1, 1, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 2, 4, 3, 2, 3, 1, 1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 1, 2, 3, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 3, 1, 1, 2, 2, 2, 1, 1, 2, 7, 7, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 7, 2, 1, 2, 1, 2, 3, 2, 2, 2, 2, 2, 2, 2, 
1, 1, 2, 2, 1, 2, 3, 2, 2, 3, 2, 2, 1, 2, 3, 2, 2, 2, 2, 3, 1, 2, 2, 1, 2, 1, 1, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 17, 2, 14, 2, 2, 2, 2, 14, 2, 2, 2, 3, 2, 1, 2, 2, 2, 2, 3, 3, 1, 3, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 2, 3, 1, 7, 2, 3, 2, 2, 2, 2, 2, 7, 2, 2, 3, 2, 4, 2, 2, 3, 2, 3, 2, 2, 3, 7, 2, 2, 2, 5, 3, 2, 2, 2, 2, 2, 2, 14, 3, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 3, 2, 1, 2, 1, 2, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 2, 1, 2, 2, 1, 1, 1, 1, 1, 2, 1, 1, 7, 1, 1, 2, 2, 17, 2, 2, 1, 1, 2, 17, 2, 1, 13, 1, 17, 7, 2, 1, 2, 1, 13, 2, 1, 2, 2, 2, 1, 1, 2, 14, 2, 2, 17, 1, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 14, 2, 7, 1, 4, 2, 17, 13, 7, 2, 1, 2, 2, 2, 1, 2, 1, 1, 7, 13, 7, 1, 2, 13, 1, 2, 2, 2, 7, 1, 2, 2, 2, 14, 1, 2, 2, 1, 7, 2, 1, 2, 2, 2, 2, 2, 14, 2, 3, 1, 1, 2, 2, 14, 2, 2, 2, 7, 2, 17, 1, 14, 2, 2, 2, 2, 17, 2, 3, 3, 2, 1, 2, 2, 1, 1, 2, 2, 2, 1, 1, 2, 7, 7, 13, 1, 2, 2, 2, 3, 1, 3, 2, 5, 14, 7, 2, 1, 1, 7, 1, 1, 7, 1, 1, 2, 2, 17, 1, 2, 7, 7, 2, 1, 1, 13, 7, 1, 2, 1, 1, 1, 2, 2, 1, 17, 7, 1, 1, 1, 2, 7, 1, 1, 17, 1, 17, 1, 2, 1, 1, 1, 2, 13, 13, 2, 15, 7, 2, 7, 7, 2, 2, 7, 2, 1, 0, 2, 2, 14, 2, 1, 2, 1, 2, 1, 14, 14, 3, 3, 17, 2, 1, 1, 1, 1, 1, 1, 3, 2, 1, 14, 2, 1, 2, 1, 1, 3, 3, 1, 2, 1, 2, 1, 3, 3, 2, 1, 2, 2, 3, 2, 2, 3, 2, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 13, 14, 1, 3, 2, 17, 1, 2, 2, 2, 13, 2, 1, 2, 2, 1, 2, 1, 2, 1, 2, 15, 2, 2, 1, 1, 2, 2, 4, 2, 1, 2, 1, 2, 2, 3, 2, 2, 2, 2, 1, 2, 2, 3, 2, 3, 1, 2, 2, 2, 1, 2, 7, 2, 1, 1, 3, 2, 14, 2, 2, 2, 2, 3, 1, 1, 1, 13, 1, 2, 2, 3, 14, 12, 14, 2, 2, 1, 2, 1, 1, 15, 1, 2, 3, 2, 3, 14, 1, 2, 2, 1, 2, 7, 2, 1, 2, 1, 1, 2, 2, 1, 2, 13, 3, 2, 2, 1, 2, 1, 2, 3, 2, 3, 1, 1, 3, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 13, 2, 1, 12, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 14, 1, 2, 3, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 3, 3, 2, 2, 
13, 3, 3, 3, 2, 3, 2, 1, 3, 1, 2, 2, 7, 2, 1, 2, 1, 2, 2, 3, 2, 14, 3, 2, 2, 1, 2, 2, 2, 3, 2, 2, 2, 3, 1, 1, 2, 14, 1, 2, 1, 1, 2, 2, 2, 2, 2, 15, 2, 3, 3, 2, 2, 3, 1, 1, 2, 2, 2, 3, 2, 14, 2, 1, 3, 3, 2, 2, 3, 2, 2, 2, 3, 4, 2, 2, 2, 12, 15, 2, 14, 14, 1, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 1, 2, 3, 7, 2, 2, 13, 2, 13, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 3, 2, 1, 13, 2, 3, 14, 3, 2, 1, 14, 3, 2, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 3, 1, 3, 2, 2, 3, 2, 1, 14, 2, 14, 2, 2, 3, 2, 2, 1, 2, 2, 2, 2, 14, 2, 2, 2, 2, 1, 3, 2, 1, 1, 2, 2, 2, 2, 14, 2, 2, 14, 13, 2, 1, 3, 2, 1, 2, 3, 1, 1, 3, 2, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2, 2, 7, 2, 1, 2, 2, 7, 2, 2, 1, 14, 1, 3, 13, 2, 2, 1, 15, 2, 2, 2, 2, 2, 2, 1, 1, 13, 2, 2, 4, 2, 3, 2, 3, 1, 2, 2, 2, 2, 1, 2, 1, 1, 1, 1, 1, 3, 1, 1, 3, 14, 3, 2, 1, 2, 2, 2, 1, 2, 1, 2, 13, 1, 2, 2, 2, 2, 1, 2, 14, 1, 2, 2, 1, 2, 1, 1, 1, 3, 2, 2, 2, 13, 2, 1, 2, 13, 2, 2, 2, 3, 1, 1, 1, 1, 1, 14, 3, 1, 2, 2, 3, 2, 3, 2, 1, 2, 1, 14, 2, 1, 2, 1, 2, 2, 2, 2, 13, 14, 2, 13, 1, 2, 2, 2, 2, 4, 1, 1, 2, 14, 1, 2, 1, 3, 2, 1, 1, 1, 2, 2, 2, 6, 2, 2, 2, 14, 2, 2, 14, 2, 2, 1, 1, 1, 1, 2, 17, 2, 1, 2, 1, 3, 12, 1, 2, 2, 2, 1, 3, 1, 1, 2, 1, 2, 1, 2, 1, 1, 1, 14, 2, 13, 13, 1, 2, 13, 1, 1, 1, 2, 14, 1, 3, 3, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 13, 1, 1, 1, 12, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 6, 1, 13, 2, 1, 13, 13, 2, 1, 1, 1, 1, 1, 13, 3, 2, 2, 2, 2, 1, 2, 1, 2, 6, 1, 1, 3, 1, 17, 1, 12, 2, 3, 2, 1, 2, 1, 1, 1, 2, 3, 13, 3, 3, 3, 2, 1, 13, 1, 1, 14, 7, 2, 17, 1, 1, 1, 1, 14, 1, 12, 2, 7, 1, 12, 2, 2, 1, 2, 14, 13, 1, 3, 13, 2, 1, 2, 1, 2, 2, 1, 14, 2, 3, 1, 1, 17, 1, 1, 13, 1, 2, 2, 1, 13, 1, 3, 17, 1, 1, 1, 2, 13, 1, 1, 14, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 2, 17, 1, 2, 1, 17, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 3, 1, 13, 1, 1, 2, 1, 2, 1, 1, 1, 2, 1, 16, 6, 2, 1, 2, 1, 1, 17, 1, 3, 1, 2, 2, 2, 1, 2, 1, 2, 1, 3, 1, 2, 2, 17, 2, 1, 3, 1, 2, 1, 1, 12, 13, 2, 2, 2, 1, 1, 2, 2, 1, 2, 1, 2, 2, 1, 12, 13, 3, 1, 1, 1, 2, 17, 2, 2, 1, 1, 1, 1, 
1, 12, 1, 2, 2, 1, 1, 1, 13, 1, 1, 1, 1, 13, 1, 14, 1, 1, 1, 1, 1, 1, 2, 2, 13, 1, 2, 1, 2, 2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 1, 2, 1, 1, 1, 1, 2, 14, 17, 2, 2, 2, 17, 1, 2, 2, 13, 1, 1, 2, 13, 2, 2, 1, 1, 2, 1, 1, 2, 1, 2, 2, 1, 1, 1, 1, 1, 1, 17, 1, 17, 3, 3, 2, 2, 1, 13, 1, 13, 12, 2, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 2, 2, 12, 1, 17, 2, 12, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 7, 2, 2, 1, 2, 2, 1, 3, 2, 1, 1, 1, 2, 2, 17, 3, 1, 2, 2, 2, 1, 3, 17, 2, 1, 2, 1, 2, 1, 2, 1, 1, 1, 2, 12, 17, 1, 1, 2, 1, 1, 1, 2, 13, 2, 2, 1, 1, 15, 2, 17, 1, 14, 1, 1, 1, 3, 1, 2, 2, 2, 2, 2, 3, 1, 1, 2, 1, 1, 1, 2, 1, 13, 7, 1, 1, 17, 2, 14, 2, 1, 1, 2, 4, 1, 2, 1, 1, 14, 2, 2, 1, 2, 17, 17, 2, 1, 1, 2, 17, 17, 17, 2, 1, 1, 2, 3, 2, 2, 13, 3, 13, 1, 17, 17, 17, 17, 2, 7, 2, 1, 17, 1, 2, 1, 15, 3, 13, 1, 3, 1, 2, 12, 15, 3, 2, 1, 1, 3, 2, 1, 2, 1, 2, 17, 2, 1, 3, 1, 1, 3, 1, 1, 1, 2, 15, 17, 1, 13, 12, 2, 2, 1, 1, 13, 1, 1, 1, 17, 1, 1, 1, 2, 12, 2, 1, 1, 1, 1, 17, 1, 17, 1, 2, 12, 17, 17, 2, 3, 14, 15, 2, 14, 17, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 14, 1, 1, 17, 2, 2, 12, 1, 1, 1, 1, 1, 1, 1, 14, 17, 1, 1, 2, 1, 17, 3, 1, 1, 2, 13, 3, 1, 1, 1, 1, 1, 13, 1, 1, 1, 13, 1, 2, 1, 3, 2, 13, 7, 1, 14, 14, 14, 13, 17, 13, 1, 3, 1, 17, 17, 13, 1, 1, 1, 12, 17, 1, 1, 13, 15, 1, 2, 1, 1, 1, 1, 2, 1, 1, 12, 12, 13, 1, 1, 2, 12, 12, 1, 3, 17, 2, 12, 17, 1, 1, 3, 2, 1, 3, 12, 2, 2, 3, 2, 14, 14, 1, 1, 2, 12, 1, 12, 12, 14, 1, 1, 1, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 1, 2, 13, 13, 17, 2, 1, 1, 1, 2, 1, 15, 2, 17, 2, 2, 1, 1, 2, 1, 14, 2, 1, 2, 1, 1, 3, 2, 3, 1, 2, 3, 2, 1, 2, 17, 1, 2, 2, 1, 2, 2, 3, 15, 1, 3, 13, 3, 1, 1, 2, 3, 14, 1, 13, 2, 2, 1, 14, 2, 1, 2, 2, 1, 17, 12, 12, 1, 1, 2, 1, 1, 1, 2, 2, 13, 13, 1, 17, 1, 2, 17, 1, 1, 1, 1, 1, 1, 2, 14, 1, 1, 3, 13, 12, 12, 2, 1, 1, 2, 2, 3, 3, 1, 3, 1, 3, 1, 1, 2, 2, 13, 1, 2, 2, 12, 12, 2, 2, 12, 17, 1, 17, 2, 12, 17, 17, 1, 17, 12, 17, 1, 17, 2, 1, 13, 2, 2, 1, 1, 2, 2, 1, 1, 17, 2, 3, 17, 3, 1, 1, 1, 2, 1, 12, 3, 2, 2, 12, 17, 17, 12, 12, 
13, 2, 3, 17, 2, 1, 3, 2, 2, 3, 17, 17, 12, 2, 3, 2, 2, 2, 3, 1, 3, 2, 1, 12, 2, 2, 3, 2, 3, 1, 2, 2, 17, 12, 7, 12, 12, 1, 1, 1, 2, 2, 1, 1, 1, 12, 13, 17, 17, 1, 17, 1, 1, 1, 1, 13, 1, 2, 17, 2, 1, 2, 2, 2, 2, 2, 12, 3, 1, 1, 2, 2, 2, 2, 1, 1, 13, 14, 1, 1, 2, 2, 2, 13, 17, 2, 1, 2, 15, 2, 2, 1, 2, 13, 1, 2, 1, 2, 17, 2, 1, 2, 2, 1, 6, 13, 1, 1, 1, 1, 2, 1, 1, 2, 1, 17, 2, 2, 13, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 2, 2, 3, 1, 1, 17, 1, 3, 1, 2, 17, 2, 2, 1, 1, 2, 13, 1, 1, 1, 1, 1, 2, 2, 1, 2, 13, 13, 3, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 1, 2, 1, 1, 1, 2, 1, 2, 2, 2, 1, 2, 2, 13, 2, 1, 1, 1, 2, 14, 5, 2, 2, 1, 2, 1, 1, 2, 2, 2, 2, 13, 1, 2, 2, 2, 2, 2, 2, 1, 1, 3, 2, 4, 2, 2, 1, 15, 2, 2, 1, 12, 1, 3, 1, 14, 1, 2, 1, 1, 1, 2, 2, 1, 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 2, 2, 2, 1, 1, 2, 1, 2, 3, 3, 2, 2, 1, 2, 2, 1, 17, 1, 2, 15, 2, 1, 13, 2, 1, 1, 3, 2, 1, 14, 2, 1, 2, 2, 1, 2, 14, 1, 2, 16, 2, 2, 2, 1, 1, 5, 1, 2, 1, 3, 2, 2, 2, 2, 13, 2, 5, 2, 1, 2, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 2, 3, 2, 13, 3, 2, 2, 2, 2, 1, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 1, 12, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 3, 2, 2, 1, 13, 1, 1, 1, 1, 3, 1, 15, 1, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 13, 2, 14, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 3, 1, 2, 2, 17, 13, 1, 2, 1, 2, 1, 2, 1, 1, 2, 1, 2, 2, 1, 2, 2, 1, 17, 2, 1, 1, 3, 2, 2, 1, 2, 1, 1, 2, 2, 1, 2, 1, 1, 13, 2, 12, 2, 1, 15, 2, 1, 3, 1, 1, 13, 1, 2, 1, 7, 2, 1, 1, 2, 1, 14, 13, 1, 2, 1, 17, 2, 1, 1, 2, 2, 1, 1, 2, 2, 1, 2, 13, 2, 1, 3, 1, 1, 12, 1, 17, 1, 1, 1, 13, 1, 2, 1, 1, 1, 1, 2, 16, 1, 6, 1, 3, 1, 2, 1, 1, 12, 1, 12, 1, 1, 1, 1, 1, 1, 6, 1, 1, 17, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 13, 1, 1, 1, 1, 12, 1, 1, 12, 17, 1, 1, 1, 1, 1, 2, 1, 1, 12, 1, 12, 1, 1, 1, 1, 13, 1, 1, 1, 1, 12, 2, 13, 1, 1, 17, 1, 1, 1, 1, 17, 1, 1, 12, 2, 1, 1, 1, 1, 1, 1, 12, 1, 1, 2, 13, 1, 1, 17, 1, 1, 13, 1, 12, 1, 1, 15, 2, 14, 12, 1, 1, 12, 1, 6, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 15, 1, 1, 12, 1, 1, 2, 14, 1, 1, 1, 1, 
12, 1, 13, 15, 13, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 12, 1, 2, 1, 1, 1, 1, 2, 2, 1, 13, 1, 1, 1, 1, 13, 1, 1, 1, 1, 2, 1, 12, 1, 12, 7, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 13, 12, 13, 1, 1, 1, 15, 1, 17, 1, 1, 12, 1, 1, 1, 1, 1, 1, 1, 1, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 13, 1, 17, 1, 1, 1, 15, 12, 1, 14, 1, 1, 14, 1, 1, 1, 1, 1, 1, 1, 1, 13, 1, 13, 1, 7, 1, 12, 14, 1, 13, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 14, 1, 1, 15, 1, 1, 12, 1, 13, 1, 1, 1, 1, 14, 1, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 1, 1, 12, 1, 1, 1, 13, 15, 13, 1, 1, 12, 1, 1, 12, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 1, 17, 16, 1, 1, 1, 1, 1, 1, 1, 12, 14, 1, 13, 1, 12, 1, 14, 1, 1, 1, 1, 1, 1, 14, 2, 1, 1, 1, 1, 1, 1, 1, 1, 12, 12, 1, 1, 1, 1, 1, 1, 14, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 7, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 13, 1, 14, 1, 13, 1, 1, 12, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 13, 13, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 1, 17, 6, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 13, 1, 1, 13, 12, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 12, 1, 1, 1, 1, 1, 1, 12, 1, 12, 1, 1, 1, 1, 15, 1, 6, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 6, 1, 1, 2, 1, 2, 7, 1, 17, 2, 1, 1, 1, 1, 1, 2, 12, 2, 2, 2, 1, 14, 2, 17, 15, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 15, 17, 7, 2, 17, 17, 2, 1, 3, 1, 2, 2, 17, 13, 14, 1, 2, 1, 1, 2, 14, 2, 13, 1, 1, 1, 15, 7, 1, 2, 1, 1, 2, 1, 3, 1, 1, 17, 1, 13, 17, 13, 17, 3, 1, 1, 14, 2, 2, 2, 2, 14, 2, 2, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 17, 1, 1, 17, 1, 2, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 2, 3, 1, 17, 1, 1, 13, 17, 1, 2, 1, 2, 2, 1, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 13, 1, 12, 1, 17, 1, 2, 1, 3, 1, 1, 2, 2, 1, 1, 2, 1, 
17, 14, 1, 2, 2, 2, 1, 17, 17, 13, 1, 1, 1, 2, 17, 2, 2, 2, 1, 2, 2, 17, 1, 2, 1, 2, 1, 2, 2, 1, 2, 1, 1, 2, 1, 1, 2, 2, 1, 3, 1, 1, 14, 1, 1, 1, 14, 1, 1, 1, 1, 1, 13, 17, 2, 1, 2, 2, 1, 2, 17, 1, 1, 1, 1, 14, 1, 2, 1, 1, 17, 2, 1, 1, 1, 1, 17, 1, 1, 1, 2, 1, 2, 1, 2, 2, 2, 7, 1, 1, 17, 17, 2, 2, 1, 13, 2, 1, 1, 1, 1, 1, 1, 7, 17, 1, 2, 1, 1, 1, 2, 1, 1, 2, 1, 1, 1, 2, 1, 1, 13, 1, 1, 2, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 2, 1, 12, 1, 3, 2, 2, 2, 1, 3, 2, 15, 1, 2, 1, 3, 13, 1, 1, 2, 1, 1, 1, 7, 7, 1, 2, 1, 1, 1, 1, 1, 15, 14, 1, 2, 17, 1, 3, 2, 12, 13, 1, 13, 17, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 7, 17, 1, 2, 15, 1, 2, 7, 1, 1, 2, 1, 17, 1, 1, 17, 2, 13, 1, 1, 1, 2, 2, 13, 1, 1, 1, 1, 1, 17, 1, 2, 2, 14, 1, 13, 1, 1, 2, 1, 1, 1, 13, 1, 2, 1, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 2, 2, 1, 2, 3, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 17, 1, 2, 1, 1, 3, 1, 1, 2, 2, 3, 1, 1, 2, 2, 1, 2, 1, 2, 2, 3, 1, 2, 1, 3, 2, 2, 1, 1, 1, 13, 1, 1, 1, 1, 2, 1, 1, 1, 3, 3, 1, 1, 1, 2, 1, 1, 2, 2, 2, 2, 1, 1, 13, 1, 1, 7, 2, 2, 1, 2, 13, 2, 1, 1, 2, 1, 1, 15, 1, 2, 12, 2, 1, 13, 1, 1, 13, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 13, 2, 1, 1, 1, 2, 1, 2, 1, 13, 13, 2, 1, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 13, 2, 1, 1, 2, 15, 1, 2, 1, 1, 2, 1, 1, 13, 1, 1, 2, 13, 1, 1, 2, 1, 1, 2, 1, 1, 1, 2, 1, 13, 2, 2, 2, 15, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 13, 2, 1, 1, 7, 2, 2, 1, 3, 2, 2, 13, 1, 13, 1, 15, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 2, 2, 2, 1, 2, 15, 2, 1, 1, 2, 1, 1, 1, 2, 1, 2, 3, 1, 13, 2, 2, 15, 2, 13, 1, 13, 2, 2, 2, 3, 1, 2, 1, 15, 2, 13, 1, 2, 1, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 7, 2, 2, 1, 1, 2, 15, 3, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 2, 2, 17, 2, 2, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 2, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 3, 13, 1, 1, 1, 1, 1, 2, 1, 1, 13, 2, 1, 1, 2, 1, 1, 1, 15, 1, 1, 2, 1, 1, 2, 1, 14, 1, 14, 2, 13, 1, 2, 2, 2, 2, 2, 1, 2, 13, 1, 2, 1, 2, 1, 1, 1, 2, 1, 2, 14, 1, 1, 1, 2, 3, 
1, 2, 1, 2, 2, 1, 1, 1, 1, 2, 1, 12, 1, 13, 12, 3, 14, 1, 1, 1, 1, 1, 2, 14, 1, 1, 1, 1, 2, 1, 1, 2, 1, 2, 1, 1, 3, 2, 1, 1, 1, 1, 17, 2, 2, 1, 2, 1, 1, 1, 2, 3, 3, 3, 1, 2, 2, 2, 3, 2, 3, 1, 1, 1, 2, 1, 1, 1, 2, 1, 2, 2, 1, 1, 1, 2, 1, 3, 2, 1, 1, 2, 1, 1, 1, 1, 3, 1, 1, 2, 3, 2, 3, 3, 2, 2, 1, 1, 2, 2, 2, 1, 1, 1, 2, 2, 1, 2, 1, 13, 1, 1, 3, 3, 2, 3, 12, 13, 1, 2, 1, 1, 13, 1, 13, 14, 2, 1, 2, 3, 3, 1, 1, 1, 1, 1, 13, 1, 1, 2, 1, 1, 1, 1, 1, 13, 1, 1, 15, 13, 1, 2, 1, 2, 1, 1, 2, 1, 1, 1, 1, 13, 2, 1, 1, 2, 2, 1, 13, 14, 1, 1, 2, 1, 1, 13, 2, 12, 13, 13, 2, 13, 1, 1, 1, 13, 1, 2, 1, 1, 1, 1, 2, 1, 13, 2, 1, 1, 1, 13, 13, 1, 2, 17, 2, 1, 1, 1, 1, 14, 1, 1, 2, 1, 12, 2, 1, 2, 1, 17, 1, 1, 1, 13, 1, 2, 17, 1, 2, 13, 1, 1, 14, 17, 1, 13, 2, 1, 2, 1, 1, 1, 12, 1, 17, 3, 2, 5, 13, 1, 1, 1, 7, 13, 1, 1, 2, 2, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 1, 2, 2, 2, 1, 2, 2, 1, 13, 2, 1, 1, 14, 1, 2, 2, 1, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 17, 1, 1, 1, 2, 1, 13, 1, 2, 1, 17, 14, 1, 1, 1, 2, 1, 12, 1, 13, 1, 2, 1, 1, 1, 2, 13, 1, 3, 12, 17, 1, 2, 1, 1, 1, 12, 2, 12, 13, 1, 17, 2, 2, 2, 1, 1, 1, 7, 1, 1, 2, 2, 1, 1, 2, 2, 1, 2, 13, 1, 1, 1, 15, 2, 17, 1, 2, 13, 1, 1, 13, 17, 1, 1, 1, 2, 14, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 14, 1, 17, 1, 2, 2, 2, 2, 14, 17, 17, 17, 17, 2, 17, 1, 1, 2, 2, 13, 12, 1, 13, 1, 1, 2, 1, 2, 13, 12, 2, 1, 1, 17, 1, 1, 1, 1, 3, 1, 17, 1, 1, 2, 1, 2, 1, 1, 17, 2, 1, 17, 14, 17, 1, 2, 1, 14, 1, 3, 1, 13, 2, 1, 1, 2, 17, 1, 13, 1, 1, 17, 1, 1, 1, 13, 2, 14, 17, 1, 2, 17, 2, 2, 17, 17, 1, 17, 17, 14, 17, 1, 1, 2, 14, 17, 1, 2, 2, 17, 1, 17, 1, 17, 2, 17, 17, 13, 17, 17, 1, 2, 1, 17, 1, 13, 17, 13, 17, 2, 13, 17, 17, 1, 17, 17, 2, 1, 13, 1, 2, 17, 14, 2, 1, 1, 2, 17, 1, 2, 2, 17, 12, 17, 2, 17, 13, 1, 2, 13, 2, 14, 2, 3, 17, 15, 14, 1, 12, 1, 2, 1, 2, 1, 17, 2, 13, 13, 1, 1, 17, 2, 1, 1, 1, 1, 2, 12, 14, 14, 14, 1, 2, 17, 13, 15, 1, 1, 13, 1, 14, 14, 2, 14, 1, 2, 1, 2, 2, 2, 1, 1, 16, 17, 1, 2, 12, 15, 1, 1, 1, 2, 2, 17, 2, 13, 17, 17, 1, 17, 17, 17, 12, 1, 17, 
1, 7, 17, 17, 2, 2, 1, 1, 17, 17, 1, 2, 2, 17, 2, 17, 2, 14, 17, 1, 12, 17, 7, 17, 2, 13, 13, 1, 1, 17, 1, 17, 17, 17, 1, 7, 17, 1, 13, 1, 2, 2, 13, 13, 1, 12, 1, 1, 12, 12, 12, 12, 17, 1, 12, 2, 3, 14, 14, 2, 2, 15, 1, 1, 17, 2, 13, 13, 2, 14, 2, 5, 2, 1, 1, 14, 2, 1, 1, 13, 2, 3, 1, 1, 12, 1, 13, 1, 1, 2, 1, 13, 1, 2, 1, 12, 12, 1, 1, 2, 2, 1, 2, 2, 14, 14, 2, 2, 2, 2, 1, 2, 2, 15, 13, 6, 1, 1, 1, 1, 12, 2, 12, 14, 1, 1, 1, 1, 12, 1, 2, 2, 1, 12, 1, 1, 2, 1, 1, 1, 12, 1, 1, 12, 15, 1, 14, 1, 1, 1, 2, 13, 14, 1, 1, 6, 1, 12, 1, 1, 15, 1, 2, 2, 12, 12, 1, 1, 2, 2, 12, 1, 1, 1, 1, 1, 13, 1, 1, 12, 1, 1, 2, 2, 17, 12, 2, 15, 1, 1, 14, 2, 1, 17, 1, 13, 1, 12, 12, 2, 1, 1, 1, 1, 1, 15, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 1, 13, 1, 1, 1, 12, 1, 15, 2, 12, 1, 15, 1, 1, 1, 1, 1, 2, 2, 2, 14, 1, 2, 3, 1, 2, 1, 2, 2, 1, 1, 2, 1, 1, 1, 1, 1, 1, 3, 1, 2, 2, 1, 1, 1, 2, 15, 1, 2, 15, 2, 12, 2, 13, 1, 1, 17, 13, 1, 12, 2, 1, 1, 1, 1, 1, 14, 1, 12, 2, 1, 1, 5, 2, 2, 13, 13, 14, 1, 15, 2, 2, 2, 13, 2, 1, 2, 5, 1, 1, 1, 2, 1, 1, 1, 1, 2, 2, 1, 3, 1, 1, 2, 1, 2, 2, 15, 12, 1, 1, 1, 1, 17, 1, 2, 2, 3, 1, 1, 2, 2, 2, 17, 1, 1, 1, 1, 12, 1, 2, 3, 17, 1, 1, 1, 3, 1, 15, 1, 12, 12, 2, 2, 2, 1, 1, 2, 1, 1, 1, 14, 2, 2, 15, 1, 2, 2, 1, 1, 15, 1, 1, 1, 1, 1, 2, 1, 1, 17, 2, 1, 12, 1, 2, 1, 1, 3, 1, 1, 1, 14, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 17, 1, 1, 1, 1, 1, 12, 12, 2, 1, 1, 1, 1, 13, 2, 13, 2, 1, 2, 1, 2, 1, 1, 6, 1, 2, 1, 1, 14, 2, 15, 1, 6, 1, 2, 2, 1, 6, 1, 1, }; + var feature = new double[] { 0, 0.693147180559945, 0, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.09861228866811, 0, 0, 0, 1.09861228866811, 0, 0, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0, 
1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 2.07944154167984, 0, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 1.6094379124341, 0, 1.79175946922805, 1.38629436111989, 1.6094379124341, 0, 0, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 0, 0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 0, 2.19722457733622, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.79175946922805, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0, 0, 0, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 0, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0, 
0, 1.6094379124341, 1.38629436111989, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.94591014905531, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0, 2.56494935746154, 0.693147180559945, 0, 0, 0, 0, 0, 1.6094379124341, 0, 0, 0, 0, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0, 2.56494935746154, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 2.07944154167984, 2.30258509299405, 1.38629436111989, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0, 0, 0, 0, 0, 1.09861228866811, 1.09861228866811, 0, 2.19722457733622, 2.30258509299405, 2.70805020110221, 2.07944154167984, 1.94591014905531, 2.39789527279837, 2.30258509299405, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 2.77258872223978, 2.19722457733622, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.09861228866811, 2.19722457733622, 2.30258509299405, 1.79175946922805, 2.07944154167984, 1.94591014905531, 1.38629436111989, 2.19722457733622, 3.04452243772342, 2.07944154167984, 2.19722457733622, 2.07944154167984, 
2.484906649788, 0.693147180559945, 1.94591014905531, 1.6094379124341, 2.56494935746154, 1.38629436111989, 2.63905732961526, 1.94591014905531, 2.19722457733622, 1.94591014905531, 1.09861228866811, 1.38629436111989, 1.79175946922805, 2.99573227355399, 0, 2.484906649788, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.19722457733622, 2.56494935746154, 1.79175946922805, 2.83321334405622, 0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.79175946922805, 2.30258509299405, 2.56494935746154, 2.39789527279837, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.94591014905531, 2.77258872223978, 2.484906649788, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 2.484906649788, 0, 0.693147180559945, 0.693147180559945, 2.30258509299405, 1.94591014905531, 2.56494935746154, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.09861228866811, 0, 1.09861228866811, 2.19722457733622, 2.07944154167984, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.94591014905531, 1.6094379124341, 0.693147180559945, 2.77258872223978, 2.484906649788, 1.38629436111989, 2.39789527279837, 2.39789527279837, 2.19722457733622, 0.693147180559945, 1.6094379124341, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.19722457733622, 2.484906649788, 1.38629436111989, 2.07944154167984, 1.38629436111989, 1.38629436111989, 2.89037175789616, 1.6094379124341, 2.56494935746154, 1.38629436111989, 2.19722457733622, 1.38629436111989, 2.30258509299405, 2.07944154167984, 1.79175946922805, 2.56494935746154, 1.38629436111989, 2.77258872223978, 1.79175946922805, 2.39789527279837, 1.79175946922805, 1.09861228866811, 1.94591014905531, 1.79175946922805, 2.19722457733622, 1.6094379124341, 2.56494935746154, 1.38629436111989, 0.693147180559945, 2.07944154167984, 1.94591014905531, 1.09861228866811, 1.09861228866811, 
0.693147180559945, 2.30258509299405, 2.39789527279837, 2.39789527279837, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0, 1.79175946922805, 2.83321334405622, 1.79175946922805, 1.79175946922805, 0, 2.30258509299405, 1.38629436111989, 2.77258872223978, 1.94591014905531, 1.09861228866811, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.6094379124341, 2.30258509299405, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.79175946922805, 2.07944154167984, 2.39789527279837, 1.79175946922805, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 1.79175946922805, 2.30258509299405, 1.94591014905531, 1.09861228866811, 3.2188758248682, 1.94591014905531, 1.79175946922805, 0, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.94591014905531, 1.38629436111989, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.09861228866811, 2.56494935746154, 1.6094379124341, 1.09861228866811, 1.94591014905531, 2.07944154167984, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.83321334405622, 1.94591014905531, 2.39789527279837, 2.30258509299405, 2.39789527279837, 1.94591014905531, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.09861228866811, 0.693147180559945, 1.6094379124341, 2.63905732961526, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.94591014905531, 2.30258509299405, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.99573227355399, 2.07944154167984, 2.77258872223978, 1.6094379124341, 0, 2.07944154167984, 1.09861228866811, 1.79175946922805, 2.39789527279837, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.19722457733622, 2.30258509299405, 2.484906649788, 1.94591014905531, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 2.63905732961526, 1.6094379124341, 1.94591014905531, 1.6094379124341, 2.63905732961526, 2.56494935746154, 1.94591014905531, 2.484906649788, 1.79175946922805, 2.07944154167984, 2.07944154167984, 1.38629436111989, 1.38629436111989, 1.79175946922805, 
1.94591014905531, 0, 1.38629436111989, 2.484906649788, 2.39789527279837, 1.94591014905531, 2.39789527279837, 2.07944154167984, 1.6094379124341, 2.63905732961526, 1.79175946922805, 2.63905732961526, 2.39789527279837, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 1.79175946922805, 1.79175946922805, 2.63905732961526, 2.19722457733622, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.39789527279837, 1.38629436111989, 2.19722457733622, 1.79175946922805, 2.19722457733622, 1.09861228866811, 1.09861228866811, 2.19722457733622, 2.39789527279837, 1.09861228866811, 1.38629436111989, 1.79175946922805, 2.63905732961526, 0, 1.6094379124341, 1.79175946922805, 0, 2.63905732961526, 2.07944154167984, 1.38629436111989, 1.79175946922805, 2.63905732961526, 0.693147180559945, 1.79175946922805, 1.6094379124341, 2.19722457733622, 0, 2.39789527279837, 0.693147180559945, 1.79175946922805, 2.39789527279837, 2.63905732961526, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.70805020110221, 1.09861228866811, 1.79175946922805, 1.09861228866811, 2.56494935746154, 2.19722457733622, 1.6094379124341, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 2.56494935746154, 1.38629436111989, 1.38629436111989, 2.484906649788, 2.30258509299405, 2.30258509299405, 0, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.30258509299405, 1.94591014905531, 1.79175946922805, 0, 2.77258872223978, 2.56494935746154, 2.30258509299405, 2.19722457733622, 2.30258509299405, 1.6094379124341, 2.30258509299405, 1.79175946922805, 2.19722457733622, 0.693147180559945, 1.79175946922805, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.09861228866811, 2.07944154167984, 1.09861228866811, 2.30258509299405, 0.693147180559945, 2.83321334405622, 2.83321334405622, 2.83321334405622, 2.07944154167984, 2.77258872223978, 2.30258509299405, 1.38629436111989, 0.693147180559945, 2.56494935746154, 1.6094379124341, 1.38629436111989, 
0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.79175946922805, 3.09104245335832, 2.30258509299405, 2.56494935746154, 1.38629436111989, 2.07944154167984, 2.19722457733622, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.89037175789616, 2.19722457733622, 1.79175946922805, 2.07944154167984, 1.09861228866811, 0, 0, 0, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.70805020110221, 2.83321334405622, 1.79175946922805, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 2.39789527279837, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.07944154167984, 2.56494935746154, 2.07944154167984, 1.38629436111989, 2.70805020110221, 1.6094379124341, 2.484906649788, 1.79175946922805, 1.94591014905531, 2.70805020110221, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 0, 0, 1.38629436111989, 2.63905732961526, 1.79175946922805, 0.693147180559945, 1.94591014905531, 1.79175946922805, 2.07944154167984, 2.56494935746154, 0.693147180559945, 2.70805020110221, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.6094379124341, 0, 1.38629436111989, 2.30258509299405, 1.79175946922805, 0, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.484906649788, 2.30258509299405, 1.6094379124341, 1.79175946922805, 3.17805383034795, 2.70805020110221, 1.6094379124341, 2.19722457733622, 0.693147180559945, 2.70805020110221, 2.07944154167984, 2.63905732961526, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.56494935746154, 2.07944154167984, 1.94591014905531, 2.39789527279837, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0, 1.09861228866811, 
2.484906649788, 2.19722457733622, 0, 1.94591014905531, 2.07944154167984, 0, 0, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.38629436111989, 2.39789527279837, 2.56494935746154, 0.693147180559945, 2.30258509299405, 1.94591014905531, 2.07944154167984, 2.30258509299405, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.6094379124341, 0.693147180559945, 0.693147180559945, 2.83321334405622, 2.70805020110221, 0.693147180559945, 1.94591014905531, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0, 2.484906649788, 1.38629436111989, 1.79175946922805, 2.484906649788, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.77258872223978, 2.63905732961526, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.89037175789616, 0, 0, 2.39789527279837, 1.38629436111989, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.39789527279837, 2.56494935746154, 1.6094379124341, 2.56494935746154, 2.63905732961526, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.6094379124341, 0, 0, 0, 0, 0.693147180559945, 1.94591014905531, 2.39789527279837, 0.693147180559945, 1.6094379124341, 1.94591014905531, 2.56494935746154, 2.39789527279837, 2.63905732961526, 0, 1.09861228866811, 0, 0, 2.07944154167984, 1.79175946922805, 2.63905732961526, 0.693147180559945, 1.09861228866811, 2.39789527279837, 0.693147180559945, 2.63905732961526, 2.07944154167984, 1.79175946922805, 1.38629436111989, 0, 1.79175946922805, 2.19722457733622, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.6094379124341, 2.39789527279837, 2.30258509299405, 1.09861228866811, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.6094379124341, 0.693147180559945, 2.30258509299405, 0, 1.94591014905531, 2.30258509299405, 1.94591014905531, 
0.693147180559945, 2.77258872223978, 0, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 2.77258872223978, 2.63905732961526, 2.63905732961526, 1.6094379124341, 2.19722457733622, 2.484906649788, 2.484906649788, 1.79175946922805, 0.693147180559945, 2.484906649788, 2.30258509299405, 2.39789527279837, 2.484906649788, 1.94591014905531, 2.30258509299405, 1.6094379124341, 1.94591014905531, 1.09861228866811, 2.56494935746154, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 1.79175946922805, 0, 0, 0, 1.79175946922805, 0, 1.6094379124341, 0, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0, 2.56494935746154, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.484906649788, 0, 1.38629436111989, 1.94591014905531, 2.19722457733622, 2.19722457733622, 1.79175946922805, 1.79175946922805, 2.484906649788, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.94591014905531, 2.19722457733622, 1.94591014905531, 2.19722457733622, 3.04452243772342, 0.693147180559945, 2.30258509299405, 2.56494935746154, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.09861228866811, 2.94443897916644, 1.79175946922805, 2.83321334405622, 2.07944154167984, 1.38629436111989, 1.38629436111989, 2.19722457733622, 2.30258509299405, 2.30258509299405, 1.94591014905531, 1.94591014905531, 2.99573227355399, 2.30258509299405, 2.30258509299405, 2.30258509299405, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.39789527279837, 2.83321334405622, 1.09861228866811, 0, 2.30258509299405, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.30258509299405, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.94591014905531, 2.39789527279837, 1.94591014905531, 3.04452243772342, 1.94591014905531, 2.484906649788, 0.693147180559945, 2.07944154167984, 1.6094379124341, 
2.77258872223978, 0, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0, 2.63905732961526, 1.38629436111989, 0, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.94591014905531, 2.484906649788, 1.79175946922805, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.99573227355399, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.07944154167984, 2.39789527279837, 0.693147180559945, 2.19722457733622, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 3.04452243772342, 1.94591014905531, 0.693147180559945, 2.19722457733622, 0, 0, 0.693147180559945, 0, 2.56494935746154, 0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.39789527279837, 1.38629436111989, 2.07944154167984, 2.77258872223978, 1.6094379124341, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.09861228866811, 2.30258509299405, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.6094379124341, 2.39789527279837, 2.07944154167984, 1.79175946922805, 2.63905732961526, 1.6094379124341, 2.94443897916644, 1.38629436111989, 2.19722457733622, 2.484906649788, 1.09861228866811, 1.38629436111989, 1.38629436111989, 2.89037175789616, 1.94591014905531, 1.09861228866811, 1.09861228866811, 2.94443897916644, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0.693147180559945, 2.39789527279837, 1.38629436111989, 2.94443897916644, 1.38629436111989, 1.38629436111989, 1.6094379124341, 2.56494935746154, 3.04452243772342, 2.39789527279837, 2.30258509299405, 1.94591014905531, 2.19722457733622, 1.38629436111989, 1.38629436111989, 0, 1.38629436111989, 1.6094379124341, 2.07944154167984, 2.07944154167984, 2.30258509299405, 1.38629436111989, 
1.09861228866811, 2.77258872223978, 1.38629436111989, 3.04452243772342, 2.39789527279837, 1.38629436111989, 0.693147180559945, 1.94591014905531, 2.30258509299405, 2.63905732961526, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.94591014905531, 1.38629436111989, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.6094379124341, 1.6094379124341, 0.693147180559945, 2.77258872223978, 0, 2.19722457733622, 2.484906649788, 1.09861228866811, 2.56494935746154, 0.693147180559945, 1.6094379124341, 2.39789527279837, 0.693147180559945, 2.70805020110221, 1.09861228866811, 1.6094379124341, 2.30258509299405, 2.07944154167984, 0.693147180559945, 2.83321334405622, 1.94591014905531, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 1.94591014905531, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.6094379124341, 2.19722457733622, 2.63905732961526, 1.6094379124341, 2.19722457733622, 1.94591014905531, 2.484906649788, 2.70805020110221, 1.09861228866811, 2.07944154167984, 2.39789527279837, 2.484906649788, 2.39789527279837, 1.38629436111989, 2.19722457733622, 2.07944154167984, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.83321334405622, 1.09861228866811, 2.94443897916644, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.6094379124341, 3.09104245335832, 1.79175946922805, 0.693147180559945, 1.09861228866811, 2.94443897916644, 0.693147180559945, 0.693147180559945, 2.39789527279837, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.94591014905531, 0.693147180559945, 2.07944154167984, 1.94591014905531, 0, 2.07944154167984, 1.6094379124341, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.94591014905531, 0, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.07944154167984, 2.07944154167984, 0, 2.07944154167984, 2.07944154167984, 
1.79175946922805, 2.30258509299405, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.07944154167984, 2.83321334405622, 2.07944154167984, 2.07944154167984, 2.484906649788, 1.6094379124341, 1.09861228866811, 2.30258509299405, 2.30258509299405, 2.70805020110221, 2.19722457733622, 1.79175946922805, 2.19722457733622, 1.94591014905531, 2.39789527279837, 2.39789527279837, 2.07944154167984, 1.79175946922805, 2.63905732961526, 1.6094379124341, 2.30258509299405, 1.6094379124341, 0.693147180559945, 2.30258509299405, 2.30258509299405, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.07944154167984, 2.63905732961526, 2.07944154167984, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.07944154167984, 0.693147180559945, 1.38629436111989, 1.09861228866811, 2.77258872223978, 2.39789527279837, 1.6094379124341, 0.693147180559945, 1.6094379124341, 0, 2.99573227355399, 0.693147180559945, 0.693147180559945, 1.79175946922805, 2.484906649788, 1.38629436111989, 0, 2.07944154167984, 0, 1.09861228866811, 1.09861228866811, 2.19722457733622, 1.94591014905531, 1.94591014905531, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.94591014905531, 2.30258509299405, 2.07944154167984, 1.6094379124341, 1.94591014905531, 2.07944154167984, 2.07944154167984, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.484906649788, 1.09861228866811, 1.6094379124341, 1.94591014905531, 2.70805020110221, 1.09861228866811, 2.63905732961526, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 2.19722457733622, 2.07944154167984, 1.09861228866811, 1.94591014905531, 2.39789527279837, 2.99573227355399, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.70805020110221, 2.19722457733622, 
1.79175946922805, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 2.63905732961526, 2.63905732961526, 1.94591014905531, 1.79175946922805, 2.07944154167984, 1.79175946922805, 2.07944154167984, 2.484906649788, 1.94591014905531, 1.6094379124341, 2.07944154167984, 1.38629436111989, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.09861228866811, 1.6094379124341, 2.94443897916644, 1.79175946922805, 0, 0.693147180559945, 0, 2.19722457733622, 0.693147180559945, 1.94591014905531, 2.07944154167984, 1.79175946922805, 1.38629436111989, 2.19722457733622, 1.09861228866811, 1.94591014905531, 2.19722457733622, 1.09861228866811, 2.07944154167984, 1.6094379124341, 2.19722457733622, 1.94591014905531, 0.693147180559945, 1.94591014905531, 2.19722457733622, 3.04452243772342, 1.6094379124341, 2.39789527279837, 0, 2.77258872223978, 1.6094379124341, 2.30258509299405, 1.38629436111989, 1.6094379124341, 2.07944154167984, 2.30258509299405, 1.38629436111989, 0.693147180559945, 2.39789527279837, 0, 1.94591014905531, 2.19722457733622, 1.09861228866811, 1.79175946922805, 2.07944154167984, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0, 2.19722457733622, 1.09861228866811, 2.30258509299405, 2.07944154167984, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.19722457733622, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.6094379124341, 2.30258509299405, 2.07944154167984, 1.09861228866811, 2.70805020110221, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 2.07944154167984, 1.79175946922805, 2.39789527279837, 1.94591014905531, 1.79175946922805, 2.39789527279837, 1.6094379124341, 2.39789527279837, 2.07944154167984, 1.09861228866811, 1.6094379124341, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.09861228866811, 2.19722457733622, 1.6094379124341, 2.07944154167984, 0, 1.6094379124341, 2.89037175789616, 
1.38629436111989, 2.99573227355399, 2.30258509299405, 1.94591014905531, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.94591014905531, 2.30258509299405, 2.56494935746154, 2.77258872223978, 0.693147180559945, 2.99573227355399, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.6094379124341, 2.63905732961526, 0, 3.36729582998647, 1.6094379124341, 2.07944154167984, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.63905732961526, 0.693147180559945, 1.94591014905531, 2.56494935746154, 1.79175946922805, 2.30258509299405, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.94591014905531, 2.19722457733622, 2.70805020110221, 2.07944154167984, 2.70805020110221, 0, 1.38629436111989, 2.07944154167984, 1.79175946922805, 2.19722457733622, 1.94591014905531, 2.30258509299405, 2.30258509299405, 0.693147180559945, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.6094379124341, 2.07944154167984, 2.30258509299405, 2.07944154167984, 2.77258872223978, 1.79175946922805, 1.09861228866811, 1.79175946922805, 2.56494935746154, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.79175946922805, 2.30258509299405, 2.07944154167984, 2.07944154167984, 1.94591014905531, 0, 2.19722457733622, 1.79175946922805, 1.79175946922805, 1.94591014905531, 2.39789527279837, 2.30258509299405, 1.6094379124341, 2.484906649788, 2.39789527279837, 1.79175946922805, 2.77258872223978, 2.56494935746154, 1.09861228866811, 1.38629436111989, 2.484906649788, 2.19722457733622, 1.6094379124341, 1.09861228866811, 2.39789527279837, 1.09861228866811, 2.39789527279837, 1.94591014905531, 3.29583686600433, 1.38629436111989, 2.56494935746154, 0.693147180559945, 1.38629436111989, 2.19722457733622, 2.56494935746154, 2.19722457733622, 1.6094379124341, 2.19722457733622, 1.38629436111989, 1.94591014905531, 2.77258872223978, 1.38629436111989, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.30258509299405, 
0.693147180559945, 1.79175946922805, 1.09861228866811, 1.09861228866811, 2.07944154167984, 2.70805020110221, 2.07944154167984, 1.79175946922805, 2.19722457733622, 2.63905732961526, 0.693147180559945, 2.19722457733622, 0.693147180559945, 2.19722457733622, 0.693147180559945, 2.07944154167984, 1.94591014905531, 2.70805020110221, 1.6094379124341, 1.6094379124341, 1.79175946922805, 2.39789527279837, 2.70805020110221, 1.09861228866811, 2.484906649788, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 1.6094379124341, 0, 2.63905732961526, 2.30258509299405, 1.09861228866811, 1.94591014905531, 0, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.79175946922805, 1.6094379124341, 2.07944154167984, 1.38629436111989, 2.39789527279837, 1.94591014905531, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.79175946922805, 1.09861228866811, 2.70805020110221, 2.484906649788, 1.94591014905531, 1.6094379124341, 1.38629436111989, 2.19722457733622, 1.6094379124341, 2.77258872223978, 2.77258872223978, 2.39789527279837, 2.39789527279837, 0.693147180559945, 2.484906649788, 2.484906649788, 1.6094379124341, 2.39789527279837, 1.09861228866811, 0.693147180559945, 2.19722457733622, 2.39789527279837, 2.484906649788, 2.19722457733622, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.07944154167984, 2.30258509299405, 2.30258509299405, 2.19722457733622, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 3.09104245335832, 2.70805020110221, 2.19722457733622, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.56494935746154, 1.09861228866811, 1.94591014905531, 2.70805020110221, 2.484906649788, 1.38629436111989, 2.19722457733622, 1.6094379124341, 2.484906649788, 1.38629436111989, 2.30258509299405, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.79175946922805, 2.19722457733622, 1.38629436111989, 0.693147180559945, 
2.39789527279837, 0, 2.07944154167984, 2.94443897916644, 2.89037175789616, 2.30258509299405, 1.38629436111989, 1.6094379124341, 1.6094379124341, 2.39789527279837, 2.63905732961526, 2.484906649788, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.6094379124341, 2.63905732961526, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 2.19722457733622, 1.94591014905531, 1.6094379124341, 2.56494935746154, 0.693147180559945, 2.30258509299405, 0.693147180559945, 2.56494935746154, 2.56494935746154, 2.30258509299405, 1.94591014905531, 1.6094379124341, 1.6094379124341, 2.77258872223978, 0, 0.693147180559945, 2.07944154167984, 2.30258509299405, 2.07944154167984, 1.09861228866811, 2.30258509299405, 2.77258872223978, 2.07944154167984, 1.79175946922805, 1.38629436111989, 1.94591014905531, 2.89037175789616, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.6094379124341, 2.70805020110221, 1.38629436111989, 2.39789527279837, 2.30258509299405, 1.79175946922805, 1.6094379124341, 2.07944154167984, 2.77258872223978, 1.79175946922805, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.56494935746154, 2.89037175789616, 1.09861228866811, 2.63905732961526, 2.56494935746154, 1.09861228866811, 2.30258509299405, 1.94591014905531, 2.39789527279837, 2.07944154167984, 1.79175946922805, 2.63905732961526, 3.29583686600433, 1.6094379124341, 2.19722457733622, 2.30258509299405, 1.38629436111989, 1.38629436111989, 2.484906649788, 2.07944154167984, 1.79175946922805, 1.09861228866811, 2.07944154167984, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.83321334405622, 1.09861228866811, 1.6094379124341, 2.07944154167984, 2.39789527279837, 2.30258509299405, 0, 1.94591014905531, 0.693147180559945, 2.484906649788, 2.39789527279837, 0, 0, 
2.07944154167984, 1.09861228866811, 2.07944154167984, 2.63905732961526, 2.484906649788, 2.07944154167984, 1.38629436111989, 2.30258509299405, 2.07944154167984, 2.30258509299405, 3.09104245335832, 2.63905732961526, 1.09861228866811, 0, 2.94443897916644, 1.79175946922805, 1.94591014905531, 2.30258509299405, 2.30258509299405, 2.07944154167984, 2.39789527279837, 2.484906649788, 1.6094379124341, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0.693147180559945, 1.38629436111989, 2.19722457733622, 0, 2.19722457733622, 2.484906649788, 0.693147180559945, 1.79175946922805, 0.693147180559945, 2.30258509299405, 1.38629436111989, 1.94591014905531, 0, 0, 1.79175946922805, 0.693147180559945, 1.79175946922805, 0, 1.94591014905531, 1.6094379124341, 2.56494935746154, 2.07944154167984, 2.07944154167984, 0, 1.94591014905531, 2.39789527279837, 1.38629436111989, 1.38629436111989, 2.39789527279837, 2.39789527279837, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 2.07944154167984, 1.38629436111989, 2.77258872223978, 1.38629436111989, 2.39789527279837, 2.70805020110221, 1.38629436111989, 1.94591014905531, 1.94591014905531, 1.6094379124341, 2.19722457733622, 2.19722457733622, 2.07944154167984, 1.38629436111989, 0.693147180559945, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.70805020110221, 2.39789527279837, 2.63905732961526, 2.19722457733622, 1.09861228866811, 1.6094379124341, 1.94591014905531, 1.09861228866811, 2.19722457733622, 2.63905732961526, 2.56494935746154, 2.39789527279837, 0.693147180559945, 2.07944154167984, 2.19722457733622, 1.94591014905531, 2.07944154167984, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 2.63905732961526, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.94591014905531, 2.70805020110221, 0, 1.38629436111989, 2.77258872223978, 1.79175946922805, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.09861228866811, 1.79175946922805, 1.09861228866811, 
2.30258509299405, 2.89037175789616, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.09861228866811, 2.39789527279837, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.09861228866811, 2.70805020110221, 1.94591014905531, 2.07944154167984, 1.6094379124341, 0, 1.79175946922805, 2.19722457733622, 1.94591014905531, 0.693147180559945, 0, 2.30258509299405, 2.30258509299405, 2.07944154167984, 1.79175946922805, 2.70805020110221, 1.38629436111989, 2.63905732961526, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.19722457733622, 2.19722457733622, 0, 1.38629436111989, 1.79175946922805, 2.19722457733622, 0.693147180559945, 1.94591014905531, 1.79175946922805, 2.19722457733622, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.94591014905531, 0, 0, 2.89037175789616, 0, 0, 1.6094379124341, 3.09104245335832, 1.38629436111989, 2.30258509299405, 2.63905732961526, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0, 0, 1.09861228866811, 2.39789527279837, 0.693147180559945, 0, 1.94591014905531, 2.39789527279837, 1.38629436111989, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 2.70805020110221, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.6094379124341, 0, 2.77258872223978, 2.99573227355399, 0, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.94591014905531, 1.09861228866811, 1.79175946922805, 0.693147180559945, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.19722457733622, 0, 1.09861228866811, 1.6094379124341, 1.6094379124341, 0, 2.83321334405622, 2.39789527279837, 1.79175946922805, 
1.38629436111989, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.38629436111989, 2.63905732961526, 2.07944154167984, 0.693147180559945, 0, 1.94591014905531, 2.39789527279837, 1.38629436111989, 0, 1.94591014905531, 2.07944154167984, 2.07944154167984, 1.6094379124341, 2.19722457733622, 2.39789527279837, 1.79175946922805, 0, 1.79175946922805, 2.484906649788, 2.63905732961526, 2.19722457733622, 2.07944154167984, 2.07944154167984, 2.30258509299405, 2.77258872223978, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.09861228866811, 2.19722457733622, 2.56494935746154, 1.79175946922805, 0.693147180559945, 1.94591014905531, 0, 0.693147180559945, 2.99573227355399, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.07944154167984, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 2.07944154167984, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 0, 1.38629436111989, 2.63905732961526, 2.07944154167984, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.6094379124341, 0, 2.30258509299405, 1.38629436111989, 0, 0, 0, 1.94591014905531, 1.6094379124341, 2.56494935746154, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.07944154167984, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 2.30258509299405, 1.94591014905531, 0, 2.484906649788, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.07944154167984, 2.19722457733622, 1.09861228866811, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.79175946922805, 3.09104245335832, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.94591014905531, 
1.6094379124341, 2.63905732961526, 1.38629436111989, 2.484906649788, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.56494935746154, 1.94591014905531, 2.94443897916644, 2.39789527279837, 1.6094379124341, 1.38629436111989, 0.693147180559945, 2.83321334405622, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.79175946922805, 2.30258509299405, 1.79175946922805, 1.94591014905531, 2.07944154167984, 2.30258509299405, 1.94591014905531, 2.19722457733622, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 2.07944154167984, 2.19722457733622, 2.39789527279837, 0.693147180559945, 2.70805020110221, 1.94591014905531, 2.484906649788, 2.30258509299405, 1.94591014905531, 2.30258509299405, 1.94591014905531, 2.484906649788, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 2.83321334405622, 2.19722457733622, 2.07944154167984, 1.6094379124341, 2.07944154167984, 2.19722457733622, 1.94591014905531, 2.19722457733622, 2.89037175789616, 2.07944154167984, 2.19722457733622, 2.77258872223978, 2.484906649788, 1.6094379124341, 2.07944154167984, 2.56494935746154, 1.38629436111989, 2.30258509299405, 1.94591014905531, 2.56494935746154, 1.6094379124341, 1.38629436111989, 2.30258509299405, 1.94591014905531, 1.79175946922805, 2.19722457733622, 2.19722457733622, 1.09861228866811, 2.19722457733622, 2.19722457733622, 1.09861228866811, 2.19722457733622, 2.39789527279837, 1.6094379124341, 2.07944154167984, 2.77258872223978, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.38629436111989, 2.70805020110221, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0.693147180559945, 2.70805020110221, 1.94591014905531, 1.79175946922805, 2.19722457733622, 2.83321334405622, 1.09861228866811, 1.79175946922805, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.56494935746154, 2.39789527279837, 2.19722457733622, 1.6094379124341, 
2.30258509299405, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.6094379124341, 2.07944154167984, 1.79175946922805, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.6094379124341, 2.484906649788, 1.79175946922805, 0, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.30258509299405, 1.94591014905531, 0.693147180559945, 0.693147180559945, 2.39789527279837, 2.07944154167984, 1.79175946922805, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.19722457733622, 2.19722457733622, 2.30258509299405, 1.79175946922805, 1.79175946922805, 1.6094379124341, 3.17805383034795, 1.94591014905531, 0.693147180559945, 1.94591014905531, 2.19722457733622, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.19722457733622, 1.94591014905531, 2.07944154167984, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.94591014905531, 2.07944154167984, 1.09861228866811, 2.07944154167984, 2.77258872223978, 2.30258509299405, 2.19722457733622, 0.693147180559945, 2.30258509299405, 3.2188758248682, 1.6094379124341, 1.94591014905531, 1.94591014905531, 1.79175946922805, 2.63905732961526, 2.83321334405622, 0, 1.09861228866811, 2.39789527279837, 1.79175946922805, 2.30258509299405, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.94591014905531, 2.70805020110221, 2.07944154167984, 1.94591014905531, 2.70805020110221, 1.94591014905531, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.79175946922805, 2.19722457733622, 2.70805020110221, 0, 1.6094379124341, 2.39789527279837, 1.94591014905531, 1.38629436111989, 0.693147180559945, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.38629436111989, 2.19722457733622, 1.94591014905531, 1.6094379124341, 1.6094379124341, 1.6094379124341, 2.77258872223978, 2.19722457733622, 2.30258509299405, 1.79175946922805, 1.79175946922805, 1.79175946922805, 2.30258509299405, 1.6094379124341, 1.79175946922805, 2.19722457733622, 0, 
1.09861228866811, 0, 2.63905732961526, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.484906649788, 1.79175946922805, 0.693147180559945, 1.38629436111989, 2.484906649788, 0, 1.94591014905531, 1.6094379124341, 1.09861228866811, 2.19722457733622, 0, 1.79175946922805, 2.56494935746154, 0.693147180559945, 1.79175946922805, 1.79175946922805, 2.19722457733622, 1.6094379124341, 1.79175946922805, 2.77258872223978, 0, 2.07944154167984, 2.484906649788, 1.09861228866811, 1.94591014905531, 1.94591014905531, 1.79175946922805, 1.94591014905531, 1.09861228866811, 2.19722457733622, 2.484906649788, 0.693147180559945, 1.09861228866811, 2.77258872223978, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.09861228866811, 1.09861228866811, 2.39789527279837, 2.63905732961526, 1.38629436111989, 2.19722457733622, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.79175946922805, 2.89037175789616, 1.6094379124341, 3.04452243772342, 2.63905732961526, 1.94591014905531, 2.30258509299405, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.70805020110221, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.39789527279837, 1.09861228866811, 1.6094379124341, 1.94591014905531, 0, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 2.484906649788, 1.79175946922805, 2.70805020110221, 3.04452243772342, 2.484906649788, 3.04452243772342, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 0.693147180559945, 2.19722457733622, 2.07944154167984, 0.693147180559945, 1.79175946922805, 2.30258509299405, 1.79175946922805, 0, 1.79175946922805, 2.99573227355399, 1.38629436111989, 1.38629436111989, 1.94591014905531, 2.07944154167984, 2.63905732961526, 2.30258509299405, 2.19722457733622, 1.79175946922805, 2.19722457733622, 0.693147180559945, 2.19722457733622, 1.09861228866811, 
1.79175946922805, 1.09861228866811, 2.07944154167984, 2.70805020110221, 1.38629436111989, 0, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 2.56494935746154, 1.6094379124341, 0, 1.94591014905531, 1.6094379124341, 0, 2.19722457733622, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.63905732961526, 0, 1.94591014905531, 1.79175946922805, 2.56494935746154, 1.94591014905531, 2.19722457733622, 1.79175946922805, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.79175946922805, 2.07944154167984, 1.38629436111989, 1.38629436111989, 2.07944154167984, 3.17805383034795, 1.94591014905531, 1.6094379124341, 1.94591014905531, 1.79175946922805, 0.693147180559945, 2.484906649788, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 2.39789527279837, 0, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.94591014905531, 2.30258509299405, 2.19722457733622, 0, 1.79175946922805, 0, 2.07944154167984, 2.39789527279837, 1.09861228866811, 1.79175946922805, 1.6094379124341, 1.38629436111989, 2.30258509299405, 2.19722457733622, 0, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.6094379124341, 2.19722457733622, 2.39789527279837, 1.6094379124341, 2.39789527279837, 2.484906649788, 2.484906649788, 2.39789527279837, 1.79175946922805, 2.30258509299405, 2.39789527279837, 2.19722457733622, 1.6094379124341, 2.39789527279837, 2.39789527279837, 2.30258509299405, 1.09861228866811, 2.07944154167984, 0.693147180559945, 2.89037175789616, 1.94591014905531, 1.6094379124341, 1.38629436111989, 2.39789527279837, 1.09861228866811, 2.07944154167984, 2.63905732961526, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.56494935746154, 2.56494935746154, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 
2.56494935746154, 1.09861228866811, 2.30258509299405, 2.30258509299405, 2.484906649788, 2.70805020110221, 1.6094379124341, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.79175946922805, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 2.30258509299405, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.39789527279837, 0, 2.70805020110221, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.38629436111989, 2.30258509299405, 1.6094379124341, 1.38629436111989, 1.6094379124341, 2.39789527279837, 1.79175946922805, 1.38629436111989, 0, 1.38629436111989, 1.94591014905531, 2.39789527279837, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0.693147180559945, 2.07944154167984, 2.19722457733622, 2.63905732961526, 2.07944154167984, 1.94591014905531, 0.693147180559945, 2.07944154167984, 2.07944154167984, 2.484906649788, 1.79175946922805, 2.07944154167984, 1.94591014905531, 2.19722457733622, 1.09861228866811, 0, 1.6094379124341, 1.6094379124341, 0, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.6094379124341, 2.07944154167984, 2.19722457733622, 0.693147180559945, 1.09861228866811, 2.63905732961526, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.38629436111989, 2.94443897916644, 2.07944154167984, 1.09861228866811, 2.94443897916644, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.6094379124341, 2.30258509299405, 1.09861228866811, 0, 1.94591014905531, 2.30258509299405, 1.6094379124341, 2.30258509299405, 1.94591014905531, 1.09861228866811, 2.83321334405622, 1.09861228866811, 0, 2.39789527279837, 2.30258509299405, 1.6094379124341, 1.79175946922805, 
1.94591014905531, 2.19722457733622, 2.89037175789616, 1.94591014905531, 2.19722457733622, 2.19722457733622, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.30258509299405, 1.6094379124341, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.79175946922805, 2.56494935746154, 2.30258509299405, 1.6094379124341, 0.693147180559945, 0, 2.70805020110221, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.30258509299405, 2.07944154167984, 2.30258509299405, 0, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 2.19722457733622, 2.63905732961526, 1.94591014905531, 2.30258509299405, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 2.39789527279837, 1.6094379124341, 1.94591014905531, 1.09861228866811, 2.30258509299405, 1.38629436111989, 2.07944154167984, 1.79175946922805, 1.6094379124341, 2.77258872223978, 2.19722457733622, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.07944154167984, 0.693147180559945, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.63905732961526, 1.38629436111989, 2.30258509299405, 2.07944154167984, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 2.07944154167984, 1.79175946922805, 2.30258509299405, 2.56494935746154, 1.79175946922805, 0, 0, 0.693147180559945, 2.19722457733622, 2.19722457733622, 2.484906649788, 0.693147180559945, 1.94591014905531, 1.38629436111989, 2.56494935746154, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 0, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 2.30258509299405, 2.07944154167984, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.38629436111989, 1.94591014905531, 1.6094379124341, 1.79175946922805, 2.70805020110221, 0.693147180559945, 2.99573227355399, 2.30258509299405, 1.94591014905531, 2.56494935746154, 1.79175946922805, 1.38629436111989, 1.6094379124341, 2.56494935746154, 
2.39789527279837, 2.56494935746154, 1.6094379124341, 2.484906649788, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.79175946922805, 2.56494935746154, 1.6094379124341, 1.6094379124341, 2.19722457733622, 2.484906649788, 1.79175946922805, 1.79175946922805, 1.09861228866811, 2.19722457733622, 0.693147180559945, 2.484906649788, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.79175946922805, 2.83321334405622, 1.6094379124341, 1.79175946922805, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.56494935746154, 1.09861228866811, 2.07944154167984, 2.484906649788, 1.79175946922805, 1.79175946922805, 1.94591014905531, 2.07944154167984, 1.94591014905531, 2.19722457733622, 1.6094379124341, 2.07944154167984, 1.6094379124341, 2.484906649788, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0, 1.94591014905531, 2.19722457733622, 0.693147180559945, 2.484906649788, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.79175946922805, 2.70805020110221, 0.693147180559945, 2.19722457733622, 2.07944154167984, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.6094379124341, 2.07944154167984, 2.30258509299405, 1.09861228866811, 1.38629436111989, 2.484906649788, 0, 2.63905732961526, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.38629436111989, 2.19722457733622, 3.17805383034795, 0.693147180559945, 2.484906649788, 1.6094379124341, 0.693147180559945, 0.693147180559945, 2.19722457733622, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.484906649788, 2.56494935746154, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.38629436111989, 2.484906649788, 
1.6094379124341, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.484906649788, 1.38629436111989, 1.38629436111989, 2.30258509299405, 1.38629436111989, 2.39789527279837, 2.39789527279837, 2.19722457733622, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.94591014905531, 2.484906649788, 1.94591014905531, 3.04452243772342, 1.94591014905531, 2.07944154167984, 2.484906649788, 1.09861228866811, 1.6094379124341, 2.19722457733622, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.6094379124341, 2.70805020110221, 1.6094379124341, 2.94443897916644, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.39789527279837, 1.38629436111989, 0, 2.07944154167984, 1.38629436111989, 2.19722457733622, 1.79175946922805, 1.94591014905531, 2.39789527279837, 2.63905732961526, 0.693147180559945, 1.38629436111989, 2.70805020110221, 2.484906649788, 1.94591014905531, 1.79175946922805, 0, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.79175946922805, 0.693147180559945, 2.19722457733622, 0, 1.38629436111989, 1.38629436111989, 0.693147180559945, 2.30258509299405, 1.38629436111989, 2.07944154167984, 0, 1.38629436111989, 1.38629436111989, 2.39789527279837, 2.30258509299405, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.6094379124341, 2.56494935746154, 1.79175946922805, 1.09861228866811, 2.484906649788, 2.07944154167984, 1.6094379124341, 1.94591014905531, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 2.30258509299405, 2.39789527279837, 2.19722457733622, 2.19722457733622, 1.94591014905531, 0.693147180559945, 2.07944154167984, 2.19722457733622, 2.77258872223978, 2.56494935746154, 2.56494935746154, 1.6094379124341, 2.19722457733622, 1.09861228866811, 2.77258872223978, 1.79175946922805, 2.30258509299405, 2.19722457733622, 2.07944154167984, 1.09861228866811, 1.6094379124341, 0, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.6094379124341, 2.56494935746154, 1.6094379124341, 
1.6094379124341, 2.07944154167984, 2.94443897916644, 1.79175946922805, 2.39789527279837, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.38629436111989, 2.39789527279837, 1.38629436111989, 1.6094379124341, 2.30258509299405, 2.07944154167984, 2.484906649788, 1.94591014905531, 1.6094379124341, 2.30258509299405, 1.38629436111989, 1.94591014905531, 2.19722457733622, 2.89037175789616, 0.693147180559945, 1.6094379124341, 2.30258509299405, 2.39789527279837, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.39789527279837, 1.94591014905531, 1.09861228866811, 2.56494935746154, 1.79175946922805, 2.89037175789616, 1.38629436111989, 2.30258509299405, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.19722457733622, 0.693147180559945, 1.6094379124341, 3.13549421592915, 2.30258509299405, 2.70805020110221, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 2.99573227355399, 1.6094379124341, 2.94443897916644, 2.39789527279837, 2.89037175789616, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.79175946922805, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.38629436111989, 1.38629436111989, 2.07944154167984, 1.38629436111989, 2.63905732961526, 1.94591014905531, 1.09861228866811, 3.09104245335832, 2.19722457733622, 2.39789527279837, 2.83321334405622, 1.38629436111989, 1.94591014905531, 1.6094379124341, 2.77258872223978, 1.09861228866811, 2.63905732961526, 1.6094379124341, 2.484906649788, 2.19722457733622, 2.07944154167984, 1.94591014905531, 1.38629436111989, 1.38629436111989, 2.39789527279837, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 2.30258509299405, 1.38629436111989, 1.79175946922805, 1.94591014905531, 2.19722457733622, 1.79175946922805, 1.94591014905531, 2.30258509299405, 1.94591014905531, 
2.19722457733622, 1.94591014905531, 1.38629436111989, 2.56494935746154, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.38629436111989, 2.77258872223978, 1.6094379124341, 1.79175946922805, 1.94591014905531, 2.19722457733622, 2.19722457733622, 1.94591014905531, 2.77258872223978, 1.79175946922805, 2.07944154167984, 2.19722457733622, 1.6094379124341, 1.38629436111989, 1.38629436111989, 0, 1.38629436111989, 2.39789527279837, 2.39789527279837, 2.63905732961526, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0, 2.39789527279837, 1.79175946922805, 0.693147180559945, 2.56494935746154, 2.19722457733622, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0, 1.79175946922805, 1.6094379124341, 1.94591014905531, 2.30258509299405, 1.79175946922805, 2.07944154167984, 1.09861228866811, 2.39789527279837, 2.07944154167984, 2.19722457733622, 1.94591014905531, 0, 1.38629436111989, 2.484906649788, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.38629436111989, 3.04452243772342, 1.6094379124341, 2.77258872223978, 1.38629436111989, 1.09861228866811, 2.07944154167984, 2.07944154167984, 2.07944154167984, 1.94591014905531, 2.07944154167984, 1.38629436111989, 2.19722457733622, 2.94443897916644, 1.79175946922805, 1.6094379124341, 2.07944154167984, 2.39789527279837, 2.07944154167984, 2.19722457733622, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.38629436111989, 3.04452243772342, 1.38629436111989, 1.94591014905531, 2.63905732961526, 2.484906649788, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.39789527279837, 2.19722457733622, 1.6094379124341, 1.09861228866811, 2.39789527279837, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.63905732961526, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.6094379124341, 1.79175946922805, 
2.30258509299405, 2.30258509299405, 1.6094379124341, 2.30258509299405, 2.19722457733622, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.30258509299405, 2.07944154167984, 1.38629436111989, 0, 2.07944154167984, 2.39789527279837, 2.56494935746154, 1.38629436111989, 1.6094379124341, 0, 1.94591014905531, 2.19722457733622, 2.07944154167984, 2.484906649788, 1.6094379124341, 3.29583686600433, 1.6094379124341, 1.79175946922805, 0, 0, 2.56494935746154, 2.484906649788, 2.39789527279837, 1.09861228866811, 2.63905732961526, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 2.39789527279837, 1.79175946922805, 1.79175946922805, 1.09861228866811, 1.79175946922805, 2.83321334405622, 1.94591014905531, 1.79175946922805, 2.99573227355399, 2.70805020110221, 2.39789527279837, 2.30258509299405, 0, 1.38629436111989, 2.484906649788, 0.693147180559945, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.6094379124341, 1.09861228866811, 0, 1.79175946922805, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.38629436111989, 0, 2.30258509299405, 1.09861228866811, 2.30258509299405, 1.6094379124341, 1.09861228866811, 1.6094379124341, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.56494935746154, 1.94591014905531, 0, 0.693147180559945, 2.63905732961526, 0.693147180559945, 1.09861228866811, 2.30258509299405, 2.30258509299405, 1.09861228866811, 2.30258509299405, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.94591014905531, 1.79175946922805, 2.63905732961526, 1.79175946922805, 2.19722457733622, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.09861228866811, 2.07944154167984, 0, 1.09861228866811, 2.39789527279837, 1.6094379124341, 2.07944154167984, 2.30258509299405, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.19722457733622, 2.484906649788, 1.6094379124341, 
1.94591014905531, 1.94591014905531, 1.6094379124341, 1.79175946922805, 3.2188758248682, 1.94591014905531, 2.30258509299405, 2.19722457733622, 2.56494935746154, 1.94591014905531, 1.94591014905531, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.38629436111989, 1.6094379124341, 1.79175946922805, 2.07944154167984, 1.6094379124341, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.09861228866811, 2.63905732961526, 1.6094379124341, 2.07944154167984, 2.77258872223978, 2.07944154167984, 1.6094379124341, 1.79175946922805, 0.693147180559945, 0.693147180559945, 2.39789527279837, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.30258509299405, 2.484906649788, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 2.30258509299405, 1.94591014905531, 2.07944154167984, 2.19722457733622, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.30258509299405, 2.63905732961526, 2.39789527279837, 0.693147180559945, 2.56494935746154, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.94591014905531, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.6094379124341, 2.484906649788, 2.30258509299405, 2.07944154167984, 2.39789527279837, 1.6094379124341, 1.94591014905531, 2.30258509299405, 2.07944154167984, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.19722457733622, 2.484906649788, 1.79175946922805, 2.39789527279837, 1.38629436111989, 2.484906649788, 1.6094379124341, 1.09861228866811, 1.94591014905531, 0, 2.07944154167984, 2.07944154167984, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.94591014905531, 1.94591014905531, 1.94591014905531, 1.09861228866811, 2.99573227355399, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 
1.09861228866811, 2.19722457733622, 2.56494935746154, 1.38629436111989, 2.484906649788, 1.79175946922805, 1.09861228866811, 2.39789527279837, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0.693147180559945, 1.94591014905531, 1.38629436111989, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.79175946922805, 2.99573227355399, 1.09861228866811, 2.19722457733622, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.39789527279837, 1.6094379124341, 0.693147180559945, 1.38629436111989, 2.94443897916644, 1.6094379124341, 1.94591014905531, 2.77258872223978, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.79175946922805, 0, 1.6094379124341, 2.07944154167984, 1.38629436111989, 0, 0, 1.94591014905531, 0, 1.79175946922805, 2.63905732961526, 2.56494935746154, 2.89037175789616, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.07944154167984, 2.07944154167984, 1.38629436111989, 2.30258509299405, 2.30258509299405, 2.19722457733622, 2.99573227355399, 1.6094379124341, 2.63905732961526, 1.6094379124341, 2.30258509299405, 0.693147180559945, 1.09861228866811, 2.77258872223978, 2.30258509299405, 2.39789527279837, 2.63905732961526, 1.79175946922805, 1.79175946922805, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 2.39789527279837, 2.484906649788, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.09861228866811, 2.56494935746154, 1.79175946922805, 2.19722457733622, 1.79175946922805, 1.6094379124341, 1.79175946922805, 1.79175946922805, 
1.94591014905531, 2.39789527279837, 1.6094379124341, 1.94591014905531, 1.38629436111989, 2.07944154167984, 2.07944154167984, 1.94591014905531, 2.77258872223978, 2.484906649788, 2.89037175789616, 2.07944154167984, 2.39789527279837, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.38629436111989, 2.19722457733622, 1.94591014905531, 1.38629436111989, 2.30258509299405, 2.07944154167984, 2.83321334405622, 2.83321334405622, 2.19722457733622, 1.94591014905531, 1.09861228866811, 1.94591014905531, 1.6094379124341, 2.484906649788, 1.6094379124341, 2.484906649788, 1.79175946922805, 2.30258509299405, 2.70805020110221, 1.79175946922805, 2.07944154167984, 1.94591014905531, 1.94591014905531, 2.07944154167984, 1.6094379124341, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.56494935746154, 2.07944154167984, 1.94591014905531, 2.19722457733622, 2.19722457733622, 0.693147180559945, 2.19722457733622, 2.07944154167984, 2.94443897916644, 1.79175946922805, 0, 1.94591014905531, 2.70805020110221, 1.79175946922805, 2.19722457733622, 2.484906649788, 2.63905732961526, 1.6094379124341, 2.39789527279837, 2.19722457733622, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.79175946922805, 2.39789527279837, 0, 1.09861228866811, 1.79175946922805, 2.484906649788, 2.39789527279837, 2.07944154167984, 1.09861228866811, 2.07944154167984, 0.693147180559945, 2.07944154167984, 1.94591014905531, 1.6094379124341, 0, 1.38629436111989, 1.38629436111989, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.79175946922805, 1.09861228866811, 2.30258509299405, 2.07944154167984, 1.38629436111989, 1.94591014905531, 2.99573227355399, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.19722457733622, 1.09861228866811, 2.30258509299405, 1.38629436111989, 2.39789527279837, 2.19722457733622, 1.79175946922805, 2.30258509299405, 1.09861228866811, 2.30258509299405, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.79175946922805, 
1.09861228866811, 1.6094379124341, 1.94591014905531, 2.70805020110221, 2.30258509299405, 1.38629436111989, 1.09861228866811, 1.79175946922805, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.09861228866811, 2.99573227355399, 2.07944154167984, 1.94591014905531, 0, 1.79175946922805, 1.6094379124341, 2.70805020110221, 0.693147180559945, 1.09861228866811, 2.89037175789616, 2.39789527279837, 1.94591014905531, 2.07944154167984, 2.30258509299405, 0, 2.19722457733622, 2.30258509299405, 1.09861228866811, 1.94591014905531, 2.30258509299405, 1.79175946922805, 3.09104245335832, 1.79175946922805, 2.19722457733622, 1.94591014905531, 2.30258509299405, 2.07944154167984, 2.07944154167984, 0, 2.63905732961526, 2.07944154167984, 2.30258509299405, 1.79175946922805, 2.19722457733622, 1.09861228866811, 0, 2.63905732961526, 2.39789527279837, 0.693147180559945, 0, 2.07944154167984, 1.6094379124341, 1.6094379124341, 2.56494935746154, 1.79175946922805, 2.30258509299405, 2.07944154167984, 1.79175946922805, 0, 1.79175946922805, 2.63905732961526, 2.89037175789616, 1.6094379124341, 1.38629436111989, 1.09861228866811, 2.39789527279837, 2.484906649788, 1.38629436111989, 1.38629436111989, 2.39789527279837, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0.693147180559945, 1.6094379124341, 1.09861228866811, 2.56494935746154, 1.38629436111989, 1.6094379124341, 2.77258872223978, 1.94591014905531, 2.39789527279837, 1.94591014905531, 1.6094379124341, 2.484906649788, 2.39789527279837, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.09861228866811, 2.39789527279837, 2.77258872223978, 2.89037175789616, 0, 1.09861228866811, 1.79175946922805, 
2.19722457733622, 2.07944154167984, 2.30258509299405, 1.09861228866811, 2.07944154167984, 2.56494935746154, 0.693147180559945, 2.19722457733622, 1.6094379124341, 2.83321334405622, 2.19722457733622, 1.94591014905531, 2.07944154167984, 1.09861228866811, 2.30258509299405, 2.39789527279837, 0.693147180559945, 2.07944154167984, 1.09861228866811, 2.89037175789616, 1.79175946922805, 2.77258872223978, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.09861228866811, 2.39789527279837, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.79175946922805, 2.30258509299405, 1.94591014905531, 2.63905732961526, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.79175946922805, 1.94591014905531, 2.19722457733622, 1.09861228866811, 2.484906649788, 1.79175946922805, 1.38629436111989, 1.79175946922805, 1.94591014905531, 2.19722457733622, 2.70805020110221, 2.484906649788, 0.693147180559945, 2.19722457733622, 1.94591014905531, 1.09861228866811, 2.07944154167984, 1.38629436111989, 1.94591014905531, 2.39789527279837, 0.693147180559945, 0, 0, 2.30258509299405, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.09861228866811, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.38629436111989, 1.94591014905531, 1.38629436111989, 2.07944154167984, 2.56494935746154, 2.30258509299405, 1.79175946922805, 2.77258872223978, 2.07944154167984, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.94591014905531, 2.39789527279837, 2.56494935746154, 1.09861228866811, 1.6094379124341, 2.07944154167984, 2.484906649788, 1.94591014905531, 1.38629436111989, 2.30258509299405, 1.94591014905531, 0.693147180559945, 1.6094379124341, 2.99573227355399, 2.07944154167984, 2.39789527279837, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.79175946922805, 1.94591014905531, 2.484906649788, 1.38629436111989, 
1.09861228866811, 1.6094379124341, 0, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.79175946922805, 2.07944154167984, 2.07944154167984, 2.07944154167984, 1.79175946922805, 1.38629436111989, 2.99573227355399, 2.77258872223978, 2.83321334405622, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.38629436111989, 2.30258509299405, 1.79175946922805, 2.30258509299405, 2.39789527279837, 1.09861228866811, 2.30258509299405, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 1.6094379124341, 2.39789527279837, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.6094379124341, 2.77258872223978, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0.693147180559945, 2.19722457733622, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 2.19722457733622, 1.38629436111989, 1.09861228866811, 0, 0, 2.30258509299405, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 2.30258509299405, 0.693147180559945, 0, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.63905732961526, 1.38629436111989, 1.79175946922805, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0.693147180559945, 2.07944154167984, 
1.09861228866811, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 2.56494935746154, 0, 0, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 2.39789527279837, 2.19722457733622, 0.693147180559945, 1.94591014905531, 1.38629436111989, 1.09861228866811, 2.30258509299405, 2.07944154167984, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.94591014905531, 0.693147180559945, 2.99573227355399, 2.70805020110221, 1.79175946922805, 0.693147180559945, 0.693147180559945, 2.484906649788, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 0, 0, 1.6094379124341, 2.30258509299405, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 0, 2.07944154167984, 1.09861228866811, 1.09861228866811, 2.39789527279837, 0.693147180559945, 0, 1.6094379124341, 1.6094379124341, 1.94591014905531, 1.09861228866811, 0, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.6094379124341, 1.38629436111989, 1.38629436111989, 2.30258509299405, 2.39789527279837, 1.6094379124341, 0, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0, 0, 2.63905732961526, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0, 1.38629436111989, 0, 1.38629436111989, 1.6094379124341, 1.94591014905531, 2.39789527279837, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.79175946922805, 2.484906649788, 1.09861228866811, 2.07944154167984, 1.38629436111989, 0, 1.38629436111989, 1.94591014905531, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 2.07944154167984, 
1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 3.17805383034795, 2.19722457733622, 2.56494935746154, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0, 2.19722457733622, 1.09861228866811, 1.79175946922805, 2.19722457733622, 1.09861228866811, 0, 1.38629436111989, 1.94591014905531, 1.38629436111989, 0, 2.07944154167984, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.30258509299405, 1.79175946922805, 0, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 0, 1.6094379124341, 1.94591014905531, 2.07944154167984, 0.693147180559945, 1.79175946922805, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.6094379124341, 1.09861228866811, 2.07944154167984, 1.6094379124341, 0, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.09861228866811, 2.07944154167984, 2.19722457733622, 1.38629436111989, 2.19722457733622, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 2.19722457733622, 0, 1.94591014905531, 0, 2.77258872223978, 1.38629436111989, 1.6094379124341, 0, 0, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.79175946922805, 2.39789527279837, 1.79175946922805, 2.30258509299405, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.6094379124341, 
1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 2.07944154167984, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0, 1.09861228866811, 2.19722457733622, 0, 1.94591014905531, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.09861228866811, 2.39789527279837, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0.693147180559945, 2.30258509299405, 1.79175946922805, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.6094379124341, 2.07944154167984, 0.693147180559945, 1.38629436111989, 2.39789527279837, 1.94591014905531, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.94591014905531, 2.63905732961526, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.79175946922805, 2.30258509299405, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 1.38629436111989, 2.19722457733622, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.6094379124341, 2.30258509299405, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.94591014905531, 0.693147180559945, 2.30258509299405, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 2.63905732961526, 
1.79175946922805, 0.693147180559945, 1.6094379124341, 0, 1.09861228866811, 1.79175946922805, 2.484906649788, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 0.693147180559945, 0, 2.07944154167984, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 2.39789527279837, 0, 1.79175946922805, 1.38629436111989, 2.07944154167984, 2.07944154167984, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0, 0, 2.19722457733622, 1.79175946922805, 1.38629436111989, 1.38629436111989, 0, 0, 1.79175946922805, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 2.39789527279837, 1.94591014905531, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 2.07944154167984, 0, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.56494935746154, 0.693147180559945, 1.38629436111989, 0, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.94591014905531, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 2.94443897916644, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.79175946922805, 
1.38629436111989, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.6094379124341, 2.30258509299405, 1.94591014905531, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.94591014905531, 0, 1.79175946922805, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 1.38629436111989, 2.19722457733622, 0, 0, 2.56494935746154, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 2.19722457733622, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.94591014905531, 1.09861228866811, 2.56494935746154, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 2.70805020110221, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0, 0, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.6094379124341, 2.07944154167984, 0, 0.693147180559945, 1.79175946922805, 0, 1.79175946922805, 1.09861228866811, 0, 0, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0, 1.6094379124341, 2.19722457733622, 1.79175946922805, 0, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.79175946922805, 2.19722457733622, 
1.09861228866811, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0, 0, 0, 0, 2.07944154167984, 0.693147180559945, 0, 1.79175946922805, 2.30258509299405, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.38629436111989, 0, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.79175946922805, 0, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.6094379124341, 0, 2.19722457733622, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 2.07944154167984, 0, 1.6094379124341, 0, 1.6094379124341, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.79175946922805, 1.94591014905531, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.09861228866811, 2.19722457733622, 0, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.94591014905531, 1.6094379124341, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 2.07944154167984, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.38629436111989, 
0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 2.07944154167984, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0, 0, 1.38629436111989, 1.94591014905531, 1.38629436111989, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 0, 0, 1.94591014905531, 0, 1.09861228866811, 1.09861228866811, 2.39789527279837, 0.693147180559945, 2.63905732961526, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.94591014905531, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.94591014905531, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.19722457733622, 1.38629436111989, 1.94591014905531, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.38629436111989, 0, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.94591014905531, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 1.94591014905531, 0, 1.94591014905531, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.94591014905531, 2.39789527279837, 1.6094379124341, 2.19722457733622, 1.79175946922805, 2.39789527279837, 2.07944154167984, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.07944154167984, 2.07944154167984, 1.09861228866811, 0, 2.30258509299405, 0.693147180559945, 0.693147180559945, 2.19722457733622, 1.09861228866811, 
1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 1.94591014905531, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 2.39789527279837, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0, 2.484906649788, 2.07944154167984, 1.94591014905531, 1.6094379124341, 1.09861228866811, 1.6094379124341, 2.07944154167984, 1.6094379124341, 1.79175946922805, 1.09861228866811, 2.30258509299405, 0.693147180559945, 0.693147180559945, 0, 2.19722457733622, 1.38629436111989, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.94591014905531, 2.19722457733622, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 1.38629436111989, 1.79175946922805, 0, 2.484906649788, 1.6094379124341, 2.07944154167984, 0, 0, 2.19722457733622, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.94591014905531, 2.19722457733622, 1.94591014905531, 2.07944154167984, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0.693147180559945, 2.19722457733622, 1.94591014905531, 1.94591014905531, 0, 1.79175946922805, 0, 1.38629436111989, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0.693147180559945, 2.07944154167984, 2.07944154167984, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.94591014905531, 1.6094379124341, 0, 1.09861228866811, 2.39789527279837, 0, 
1.09861228866811, 1.09861228866811, 0, 0, 0, 1.38629436111989, 1.09861228866811, 1.94591014905531, 0, 0, 0.693147180559945, 1.38629436111989, 1.94591014905531, 0, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 0, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 2.07944154167984, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 2.484906649788, 1.09861228866811, 0, 2.484906649788, 2.30258509299405, 0, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.6094379124341, 2.56494935746154, 1.94591014905531, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0, 0, 1.94591014905531, 2.07944154167984, 2.07944154167984, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.38629436111989, 2.39789527279837, 0, 2.39789527279837, 1.38629436111989, 0, 2.19722457733622, 1.09861228866811, 0, 1.6094379124341, 2.30258509299405, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.19722457733622, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.6094379124341, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 2.39789527279837, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.09861228866811, 
2.07944154167984, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.09861228866811, 1.79175946922805, 1.94591014905531, 0, 0, 1.94591014905531, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 0, 1.6094379124341, 2.77258872223978, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 2.30258509299405, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.79175946922805, 0.693147180559945, 1.79175946922805, 0, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 1.6094379124341, 1.6094379124341, 2.30258509299405, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 0, 0.693147180559945, 0, 1.79175946922805, 1.94591014905531, 2.484906649788, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 0, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0.693147180559945, 2.19722457733622, 1.09861228866811, 2.19722457733622, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 0, 0, 1.38629436111989, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.09861228866811, 2.07944154167984, 2.30258509299405, 1.94591014905531, 0, 
0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.94591014905531, 1.38629436111989, 2.19722457733622, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.94591014905531, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 2.19722457733622, 2.30258509299405, 0, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.09861228866811, 0.693147180559945, 2.07944154167984, 1.6094379124341, 1.6094379124341, 0, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0, 1.94591014905531, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.09861228866811, 2.07944154167984, 2.07944154167984, 1.09861228866811, 0.693147180559945, 2.70805020110221, 0.693147180559945, 2.30258509299405, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.6094379124341, 0, 2.39789527279837, 1.09861228866811, 1.94591014905531, 1.09861228866811, 2.70805020110221, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.484906649788, 1.09861228866811, 0, 0.693147180559945, 2.19722457733622, 1.38629436111989, 1.94591014905531, 0, 0, 1.6094379124341, 1.6094379124341, 0, 0, 1.09861228866811, 0.693147180559945, 0, 2.30258509299405, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0, 2.07944154167984, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.79175946922805, 2.77258872223978, 1.38629436111989, 1.38629436111989, 1.6094379124341, 2.19722457733622, 1.79175946922805, 1.6094379124341, 0.693147180559945, 2.07944154167984, 
1.94591014905531, 1.38629436111989, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 2.19722457733622, 1.38629436111989, 0, 0, 2.30258509299405, 0, 1.79175946922805, 1.6094379124341, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0, 0, 0, 1.38629436111989, 2.07944154167984, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 2.19722457733622, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.07944154167984, 2.83321334405622, 1.6094379124341, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0, 1.79175946922805, 0.693147180559945, 0, 1.38629436111989, 1.94591014905531, 1.6094379124341, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0, 2.56494935746154, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 0, 2.30258509299405, 1.6094379124341, 1.38629436111989, 1.09861228866811, 2.19722457733622, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 1.79175946922805, 2.07944154167984, 1.79175946922805, 2.19722457733622, 1.94591014905531, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 
1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.79175946922805, 2.39789527279837, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0, 0, 1.09861228866811, 0.693147180559945, 1.94591014905531, 2.63905732961526, 1.79175946922805, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.38629436111989, 2.39789527279837, 0, 2.30258509299405, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.94591014905531, 2.07944154167984, 0, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.09861228866811, 2.30258509299405, 1.38629436111989, 0, 2.07944154167984, 1.38629436111989, 2.07944154167984, 1.6094379124341, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 1.38629436111989, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.38629436111989, 2.07944154167984, 0, 0, 2.07944154167984, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 2.07944154167984, 1.79175946922805, 1.38629436111989, 2.30258509299405, 0.693147180559945, 1.6094379124341, 2.07944154167984, 1.09861228866811, 2.07944154167984, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 2.07944154167984, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 2.07944154167984, 2.484906649788, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0, 1.79175946922805, 0, 
1.38629436111989, 1.38629436111989, 1.09861228866811, 2.07944154167984, 1.79175946922805, 1.09861228866811, 1.09861228866811, 2.63905732961526, 1.6094379124341, 2.39789527279837, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0, 1.6094379124341, 1.6094379124341, 2.19722457733622, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 2.19722457733622, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0, 0, 1.6094379124341, 1.94591014905531, 0, 0, 0, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.38629436111989, 2.07944154167984, 0, 1.6094379124341, 1.09861228866811, 1.38629436111989, 2.63905732961526, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.09861228866811, 2.484906649788, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.94591014905531, 0.693147180559945, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.38629436111989, 1.38629436111989, 3.58351893845611, 1.09861228866811, 1.09861228866811, 1.79175946922805, 0, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 
1.09861228866811, 0, 0, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 2.39789527279837, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0, 2.484906649788, 0.693147180559945, 0, 1.38629436111989, 1.94591014905531, 0, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 2.07944154167984, 1.09861228866811, 1.94591014905531, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 0, 0, 0, 1.38629436111989, 1.38629436111989, 0, 0, 0.693147180559945, 2.07944154167984, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.94591014905531, 2.07944154167984, 0.693147180559945, 0, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0, 0, 1.6094379124341, 1.6094379124341, 0, 0.693147180559945, 0, 2.484906649788, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0, 1.79175946922805, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 0, 
1.09861228866811, 1.79175946922805, 0, 1.79175946922805, 1.79175946922805, 0, 1.94591014905531, 0, 1.38629436111989, 1.79175946922805, 0, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 0, 0.693147180559945, 0, 0, 1.79175946922805, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.79175946922805, 1.09861228866811, 0, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 0, 0.693147180559945, 1.6094379124341, 1.79175946922805, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 2.07944154167984, 0, 0, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0, 0.693147180559945, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0, 0, 0, 2.39789527279837, 0, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 1.6094379124341, 1.79175946922805, 2.07944154167984, 2.70805020110221, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 0, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.38629436111989, 
1.38629436111989, 0, 0.693147180559945, 2.94443897916644, 1.79175946922805, 1.09861228866811, 1.09861228866811, 0, 1.38629436111989, 0, 0, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0.693147180559945, 0, 0, 0, 0.693147180559945, 1.79175946922805, 0, 0, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 0, 1.94591014905531, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 0, 0, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.79175946922805, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 2.30258509299405, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.09861228866811, 2.19722457733622, 0, 2.19722457733622, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 0, 
0.693147180559945, 0, 2.19722457733622, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 2.99573227355399, 0, 0, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.6094379124341, 2.07944154167984, 0, 0.693147180559945, 0, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 0, 1.09861228866811, 0, 1.6094379124341, 1.79175946922805, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 1.38629436111989, 1.38629436111989, 0, 1.6094379124341, 0, 1.38629436111989, 0.693147180559945, 0, 1.79175946922805, 0, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 0, 0, 1.09861228866811, 1.6094379124341, 0, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 0, 0, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 2.30258509299405, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0, 1.6094379124341, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0, 1.09861228866811, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.79175946922805, 0, 0, 
1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 0, 0, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 2.77258872223978, 1.6094379124341, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0, 0, 0, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 1.6094379124341, 0, 0, 0.693147180559945, 1.6094379124341, 2.07944154167984, 0.693147180559945, 0, 0, 1.09861228866811, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 0, 0, 2.07944154167984, 0, 0, 1.6094379124341, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0, 0, 2.07944154167984, 0, 0, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 1.79175946922805, 0, 2.30258509299405, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 0, 1.38629436111989, 1.09861228866811, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0, 1.6094379124341, 0, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 1.6094379124341, 0, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 
0, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 2.19722457733622, 1.79175946922805, 0, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 2.07944154167984, 1.09861228866811, 1.38629436111989, 0, 0, 0, 0, 1.09861228866811, 0, 2.07944154167984, 1.09861228866811, 0, 0, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0, 0, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0.693147180559945, 2.30258509299405, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0, 0, 1.09861228866811, 1.6094379124341, 0, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.94591014905531, 0, 1.94591014905531, 0.693147180559945, 0, 2.39789527279837, 2.77258872223978, 0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 0, 2.19722457733622, 1.09861228866811, 2.19722457733622, 0, 0, 1.6094379124341, 2.19722457733622, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 2.30258509299405, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.6094379124341, 0, 
0.693147180559945, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 0, 1.09861228866811, 1.79175946922805, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.79175946922805, 1.6094379124341, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 0, 0, 0, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 2.63905732961526, 0, 0, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 2.19722457733622, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 0.693147180559945, 0, 1.94591014905531, 0.693147180559945, 0, 0, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 0, 2.19722457733622, 1.38629436111989, 2.30258509299405, 0, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 0, 1.38629436111989, 0, 1.6094379124341, 1.09861228866811, 
1.09861228866811, 1.09861228866811, 0, 1.79175946922805, 0, 0, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0, 0, 0, 0, 0, 1.09861228866811, 0, 0, 1.6094379124341, 0, 1.79175946922805, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 1.38629436111989, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 0, 1.79175946922805, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 0, 2.19722457733622, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 2.07944154167984, 1.09861228866811, 0, 0, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.79175946922805, 0.693147180559945, 
0.693147180559945, 0, 0, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 2.07944154167984, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 0, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.484906649788, 1.09861228866811, 1.09861228866811, 2.30258509299405, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.94591014905531, 1.79175946922805, 1.79175946922805, 0, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0, 1.09861228866811, 0, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 2.19722457733622, 0, 0, 0.693147180559945, 2.30258509299405, 1.09861228866811, 0.693147180559945, 0, 0, 1.38629436111989, 1.38629436111989, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0, 0.693147180559945, 
0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 1.79175946922805, 1.6094379124341, 0, 0.693147180559945, 1.38629436111989, 0, 2.07944154167984, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 0, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 2.19722457733622, 0.693147180559945, 1.38629436111989, 0, 0, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0, 0, 1.09861228866811, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0, 0, 1.79175946922805, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0, 2.83321334405622, 0, 0.693147180559945, 1.38629436111989, 0, 1.38629436111989, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.09861228866811, 0, 1.38629436111989, 0, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 1.79175946922805, 1.6094379124341, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0, 0.693147180559945, 1.79175946922805, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 0, 1.94591014905531, 1.09861228866811, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 1.38629436111989, 2.07944154167984, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.38629436111989, 0, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.79175946922805, 
1.38629436111989, 2.07944154167984, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 2.19722457733622, 1.94591014905531, 0, 1.38629436111989, 1.79175946922805, 2.39789527279837, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 0, 2.56494935746154, 1.94591014905531, 1.38629436111989, 0, 0.693147180559945, 2.39789527279837, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 2.39789527279837, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.94591014905531, 2.07944154167984, 1.38629436111989, 0, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.09861228866811, 2.19722457733622, 1.94591014905531, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 2.63905732961526, 1.38629436111989, 0.693147180559945, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.79175946922805, 1.94591014905531, 0.693147180559945, 0, 1.94591014905531, 1.09861228866811, 2.484906649788, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.09861228866811, 2.07944154167984, 2.39789527279837, 2.30258509299405, 1.09861228866811, 1.38629436111989, 0.693147180559945, 2.484906649788, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.79175946922805, 1.79175946922805, 
1.6094379124341, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.79175946922805, 0.693147180559945, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0, 1.79175946922805, 2.30258509299405, 2.07944154167984, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.79175946922805, 0, 0, 2.56494935746154, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.07944154167984, 2.07944154167984, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 2.39789527279837, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.38629436111989, 0, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 2.39789527279837, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.6094379124341, 1.6094379124341, 0, 1.79175946922805, 2.30258509299405, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 0.693147180559945, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 2.63905732961526, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.79175946922805, 0, 1.09861228866811, 
0.693147180559945, 1.79175946922805, 1.09861228866811, 1.94591014905531, 2.19722457733622, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 2.89037175789616, 1.94591014905531, 1.79175946922805, 2.30258509299405, 0.693147180559945, 1.38629436111989, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0.693147180559945, 2.39789527279837, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.79175946922805, 0, 2.07944154167984, 1.09861228866811, 2.07944154167984, 2.07944154167984, 0.693147180559945, 2.07944154167984, 1.79175946922805, 3.09104245335832, 2.19722457733622, 0.693147180559945, 0.693147180559945, 2.30258509299405, 1.38629436111989, 0, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 0, 1.38629436111989, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.79175946922805, 2.63905732961526, 0.693147180559945, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.09861228866811, 2.19722457733622, 1.38629436111989, 0, 1.79175946922805, 1.79175946922805, 1.6094379124341, 0, 1.38629436111989, 2.30258509299405, 1.09861228866811, 1.09861228866811, 2.30258509299405, 0, 1.09861228866811, 1.94591014905531, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 2.39789527279837, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 
1.09861228866811, 1.6094379124341, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.6094379124341, 2.07944154167984, 1.09861228866811, 2.19722457733622, 2.30258509299405, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0, 1.6094379124341, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.6094379124341, 2.07944154167984, 1.79175946922805, 1.6094379124341, 1.79175946922805, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.79175946922805, 1.09861228866811, 0, 1.6094379124341, 0, 2.07944154167984, 0.693147180559945, 1.6094379124341, 0, 1.38629436111989, 1.09861228866811, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.30258509299405, 0, 1.79175946922805, 2.07944154167984, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.09861228866811, 2.19722457733622, 0, 0, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.94591014905531, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 0.693147180559945, 1.38629436111989, 
1.38629436111989, 1.09861228866811, 2.07944154167984, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0, 1.94591014905531, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.94591014905531, 0.693147180559945, 1.6094379124341, 0, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.09861228866811, 1.6094379124341, 2.07944154167984, 1.79175946922805, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.09861228866811, 1.94591014905531, 1.6094379124341, 1.38629436111989, 1.09861228866811, 1.79175946922805, 1.38629436111989, 0, 1.38629436111989, 1.79175946922805, 1.09861228866811, 2.70805020110221, 0, 0.693147180559945, 1.09861228866811, 0, 1.79175946922805, 0.693147180559945, 1.38629436111989, 1.79175946922805, 2.19722457733622, 1.79175946922805, 2.39789527279837, 2.30258509299405, 2.30258509299405, 1.09861228866811, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.79175946922805, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.79175946922805, 1.94591014905531, 2.30258509299405, 1.6094379124341, 2.07944154167984, 1.09861228866811, 1.38629436111989, 2.07944154167984, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.09861228866811, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 2.39789527279837, 1.09861228866811, 2.19722457733622, 0.693147180559945, 0.693147180559945, 0, 2.30258509299405, 0, 0.693147180559945, 1.38629436111989, 1.38629436111989, 1.38629436111989, 1.79175946922805, 2.39789527279837, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 
1.6094379124341, 2.19722457733622, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.79175946922805, 1.79175946922805, 0.693147180559945, 1.79175946922805, 2.19722457733622, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0, 2.39789527279837, 1.38629436111989, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 1.38629436111989, 1.94591014905531, 0.693147180559945, 0, 2.39789527279837, 1.38629436111989, 0, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.38629436111989, 1.94591014905531, 1.79175946922805, 1.79175946922805, 1.94591014905531, 0.693147180559945, 0, 2.484906649788, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.09861228866811, 1.09861228866811, 2.07944154167984, 1.94591014905531, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.94591014905531, 1.38629436111989, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.94591014905531, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.6094379124341, 1.38629436111989, 2.07944154167984, 0.693147180559945, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 0, 1.79175946922805, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 2.39789527279837, 1.09861228866811, 1.94591014905531, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.6094379124341, 2.39789527279837, 0, 1.09861228866811, 2.30258509299405, 1.94591014905531, 1.6094379124341, 
1.6094379124341, 2.30258509299405, 0.693147180559945, 1.94591014905531, 1.6094379124341, 1.79175946922805, 1.94591014905531, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.79175946922805, 2.07944154167984, 1.38629436111989, 0.693147180559945, 0.693147180559945, 2.07944154167984, 2.07944154167984, 0.693147180559945, 2.39789527279837, 0, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 1.09861228866811, 1.79175946922805, 1.6094379124341, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.6094379124341, 0.693147180559945, 2.07944154167984, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 2.484906649788, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.94591014905531, 0.693147180559945, 0, 0, 2.07944154167984, 1.79175946922805, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.79175946922805, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.94591014905531, 0, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 1.79175946922805, 1.38629436111989, 1.38629436111989, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 0, 1.94591014905531, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 1.79175946922805, 1.38629436111989, 2.07944154167984, 1.94591014905531, 1.94591014905531, 1.38629436111989, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.94591014905531, 0, 1.79175946922805, 1.09861228866811, 0.693147180559945, 0, 1.79175946922805, 1.38629436111989, 1.94591014905531, 0, 0.693147180559945, 1.6094379124341, 2.484906649788, 1.6094379124341, 1.09861228866811, 1.09861228866811, 2.30258509299405, 0, 1.09861228866811, 
1.38629436111989, 1.79175946922805, 0.693147180559945, 0.693147180559945, 1.94591014905531, 0.693147180559945, 2.07944154167984, 2.19722457733622, 2.07944154167984, 1.6094379124341, 1.09861228866811, 1.38629436111989, 1.94591014905531, 0, 1.38629436111989, 0, 1.6094379124341, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0.693147180559945, 0, 1.79175946922805, 2.07944154167984, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.6094379124341, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 1.09861228866811, 0, 1.09861228866811, 1.6094379124341, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0, 1.79175946922805, 0, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.6094379124341, 2.39789527279837, 2.56494935746154, 0.693147180559945, 0, 0, 0, 1.79175946922805, 0.693147180559945, 1.6094379124341, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0, 1.79175946922805, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.6094379124341, 1.6094379124341, 0, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 1.38629436111989, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0, 1.09861228866811, 0, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 0, 0, 0, 0.693147180559945, 1.09861228866811, 2.07944154167984, 1.79175946922805, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 0, 2.39789527279837, 1.09861228866811, 
0.693147180559945, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 0, 0, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.38629436111989, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 0, 2.30258509299405, 1.79175946922805, 1.09861228866811, 0.693147180559945, 2.19722457733622, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 1.94591014905531, 0, 1.6094379124341, 0, 0.693147180559945, 0, 1.38629436111989, 2.19722457733622, 1.6094379124341, 0, 1.6094379124341, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 0, 0, 2.39789527279837, 0.693147180559945, 0, 0, 1.09861228866811, 0, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.94591014905531, 0, 1.09861228866811, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 1.09861228866811, 2.19722457733622, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.79175946922805, 1.09861228866811, 0, 1.38629436111989, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0.693147180559945, 2.19722457733622, 1.6094379124341, 
0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 1.94591014905531, 1.38629436111989, 0, 0, 1.38629436111989, 1.38629436111989, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.6094379124341, 1.79175946922805, 2.63905732961526, 1.38629436111989, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 0, 0, 1.6094379124341, 0, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0, 1.38629436111989, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0, 1.09861228866811, 0, 1.38629436111989, 0, 0, 0, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 0.693147180559945, 0, 1.38629436111989, 0, 0, 0, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 0, 1.09861228866811, 1.94591014905531, 1.09861228866811, 0, 0, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 2.07944154167984, 0, 0.693147180559945, 0, 0, 2.19722457733622, 0, 0, 0, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0, 2.484906649788, 1.38629436111989, 0, 0, 1.09861228866811, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0.693147180559945, 1.38629436111989, 1.79175946922805, 0, 1.09861228866811, 0, 0, 1.79175946922805, 1.09861228866811, 0, 0, 0, 1.94591014905531, 0.693147180559945, 0.693147180559945, 1.09861228866811, 
0.693147180559945, 0, 2.39789527279837, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 1.09861228866811, 1.38629436111989, 1.38629436111989, 1.6094379124341, 0, 1.38629436111989, 0, 0, 0, 0.693147180559945, 1.6094379124341, 1.09861228866811, 0.693147180559945, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0, 1.38629436111989, 2.30258509299405, 0, 0, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.94591014905531, 1.94591014905531, 0, 1.09861228866811, 0, 0.693147180559945, 1.6094379124341, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 2.19722457733622, 0.693147180559945, 1.79175946922805, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.38629436111989, 2.07944154167984, 1.38629436111989, 0, 1.38629436111989, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0, 0, 0.693147180559945, 1.94591014905531, 1.09861228866811, 1.6094379124341, 0.693147180559945, 0, 0.693147180559945, 0, 1.6094379124341, 1.79175946922805, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0.693147180559945, 0, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 1.6094379124341, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 0, 1.09861228866811, 1.38629436111989, 2.30258509299405, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.09861228866811, 0, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 1.38629436111989, 1.79175946922805, 1.38629436111989, 0, 0, 1.38629436111989, 0.693147180559945, 1.38629436111989, 0, 0, 1.09861228866811, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 0, 0, 1.6094379124341, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 1.94591014905531, 0, 0.693147180559945, 0, 1.09861228866811, 2.484906649788, 0, 
1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0, 1.6094379124341, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.79175946922805, 0, 0, 1.6094379124341, 1.09861228866811, 1.09861228866811, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 1.94591014905531, 1.09861228866811, 0.693147180559945, 2.30258509299405, 0, 1.09861228866811, 0.693147180559945, 0, 0, 0, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.38629436111989, 0, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0, 2.07944154167984, 0, 1.09861228866811, 0, 1.09861228866811, 0, 0, 0, 0, 0, 1.6094379124341, 0, 1.09861228866811, 2.07944154167984, 0, 0, 0.693147180559945, 1.38629436111989, 0, 0, 0, 0.693147180559945, 0, 1.38629436111989, 0, 1.79175946922805, 1.79175946922805, 1.09861228866811, 1.6094379124341, 1.6094379124341, 1.6094379124341, 1.09861228866811, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 0, 0, 0, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.6094379124341, 1.94591014905531, 1.6094379124341, 0.693147180559945, 1.38629436111989, 0, 0.693147180559945, 0, 0, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 
0.693147180559945, 0.693147180559945, 1.09861228866811, 1.6094379124341, 0.693147180559945, 1.94591014905531, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 0, 1.09861228866811, 1.79175946922805, 0, 1.09861228866811, 1.79175946922805, 0.693147180559945, 1.09861228866811, 0, 0, 1.09861228866811, 0.693147180559945, 0.693147180559945, 0, 1.6094379124341, 0, 0.693147180559945, 0, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0, 1.38629436111989, 0.693147180559945, 1.6094379124341, 0, 0, 0, 0, 0, 2.19722457733622, 0, 1.79175946922805, 1.38629436111989, 1.09861228866811, 0, 0, 0, 1.09861228866811, 2.39789527279837, 1.38629436111989, 0.693147180559945, 0, 0.693147180559945, 0, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 1.94591014905531, 1.38629436111989, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.6094379124341, 0, 0.693147180559945, 0, 0, 1.09861228866811, 0, 0, 1.09861228866811, 0, 1.38629436111989, 0, 1.09861228866811, 0.693147180559945, 1.09861228866811, 0, 0, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.79175946922805, 0.693147180559945, 1.6094379124341, 2.07944154167984, 0, 0.693147180559945, 0, 0.693147180559945, 0, 0, 0.693147180559945, 2.19722457733622, 0, 0, 1.38629436111989, 1.09861228866811, 1.09861228866811, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.38629436111989, 1.79175946922805, 0.693147180559945, 2.30258509299405, 0.693147180559945, 1.6094379124341, 1.94591014905531, 0, 0, 0.693147180559945, 1.38629436111989, 1.09861228866811, 1.6094379124341, 0, 0.693147180559945, 0, 1.09861228866811, 1.09861228866811, 0.693147180559945, 1.6094379124341, 0.693147180559945, 0, 1.6094379124341, 0.693147180559945, 0, 1.38629436111989, 0, 1.09861228866811, 1.09861228866811, 0, 0, 1.09861228866811, 2.63905732961526, 1.09861228866811, 1.79175946922805, 1.09861228866811, 1.6094379124341, 2.77258872223978, 1.38629436111989, 0, 
1.09861228866811, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.79175946922805, 0, 0.693147180559945, 0.693147180559945, 0, 0, 1.09861228866811, 0, 1.94591014905531, 0.693147180559945, 1.09861228866811, 0.693147180559945, 0, 0.693147180559945, 1.6094379124341, 0, 0.693147180559945, 0, 0, 1.38629436111989, 0, 0, 0, 0.693147180559945, 1.6094379124341, 2.19722457733622, 1.09861228866811, 0.693147180559945, 1.38629436111989, 0.693147180559945, 0.693147180559945, 0.693147180559945, 1.09861228866811, 1.09861228866811, 1.38629436111989, 0, 0.693147180559945, 0, 0.693147180559945, 0.693147180559945, 1.6094379124341, 0, 1.94591014905531, 1.09861228866811, 1.38629436111989, 0.693147180559945, 1.79175946922805, 0, 0, 0, 0.693147180559945 }; + var targets = new double[] { 1, 7, 7, 2, 3, 7, 7, 7, 2, 7, 7, 7, 7, 1, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 1, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 3, 7, 2, 7, 7, 7, 7, 7, 5, 3, 7, 7, 7, 7, 7, 7, 3, 7, 7, 6, 7, 7, 2, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 2, 2, 7, 7, 17, 7, 2, 7, 1, 7, 7, 7, 7, 7, 7, 17, 7, 7, 7, 7, 7, 17, 7, 2, 7, 7, 1, 7, 1, 7, 7, 17, 7, 7, 1, 7, 7, 7, 7, 7, 7, 7, 2, 2, 2, 2, 1, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 0, 7, 1, 7, 1, 7, 7, 7, 7, 2, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 2, 2, 7, 2, 2, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 2, 2, 7, 2, 7, 1, 7, 2, 7, 7, 1, 7, 7, 7, 5, 2, 2, 7, 7, 2, 7, 7, 7, 7, 7, 17, 7, 2, 2, 1, 2, 7, 2, 7, 7, 2, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 2, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 2, 7, 7, 17, 7, 7, 7, 7, 7, 7, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 2, 7, 2, 7, 7, 2, 7, 14, 2, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 1, 7, 7, 7, 7, 7, 7, 2, 7, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 7, 7, 3, 2, 2, 7, 7, 2, 2, 2, 2, 7, 7, 1, 2, 2, 2, 2, 2, 2, 17, 17, 2, 7, 7, 2, 1, 2, 2, 2, 3, 2, 2, 2, 2, 2, 3, 3, 2, 2, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 3, 1, 2, 3, 2, 
2, 3, 2, 3, 3, 2, 2, 2, 2, 2, 2, 1, 3, 3, 1, 3, 3, 1, 1, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 3, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 1, 14, 2, 2, 2, 1, 3, 1, 2, 1, 2, 2, 2, 1, 1, 3, 2, 2, 1, 2, 1, 2, 7, 2, 2, 2, 1, 2, 4, 2, 2, 2, 3, 1, 3, 1, 3, 2, 3, 3, 2, 3, 2, 2, 2, 1, 2, 1, 2, 2, 3, 2, 2, 2, 1, 2, 2, 14, 2, 2, 2, 3, 3, 2, 1, 2, 3, 2, 2, 1, 3, 2, 2, 2, 2, 1, 1, 2, 2, 3, 2, 3, 3, 1, 1, 2, 2, 2, 1, 2, 3, 2, 3, 2, 2, 4, 14, 2, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 1, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 3, 3, 7, 3, 2, 2, 1, 1, 3, 2, 2, 2, 1, 2, 3, 2, 1, 2, 3, 2, 1, 2, 2, 1, 2, 2, 2, 2, 3, 1, 2, 3, 2, 3, 1, 2, 15, 2, 2, 2, 13, 3, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 1, 1, 2, 3, 2, 3, 2, 2, 2, 3, 3, 2, 3, 3, 2, 2, 2, 3, 3, 2, 2, 3, 2, 2, 3, 17, 3, 2, 2, 3, 3, 2, 2, 3, 2, 2, 3, 1, 2, 3, 3, 7, 2, 2, 3, 2, 2, 2, 2, 2, 3, 3, 2, 2, 2, 2, 3, 3, 3, 1, 3, 2, 3, 1, 3, 2, 2, 3, 3, 2, 2, 3, 3, 3, 3, 3, 2, 3, 2, 3, 2, 3, 2, 1, 2, 2, 3, 2, 3, 1, 2, 3, 2, 2, 3, 2, 2, 1, 2, 3, 2, 2, 2, 2, 1, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 2, 1, 2, 1, 1, 13, 15, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 1, 2, 14, 1, 1, 14, 1, 16, 15, 2, 2, 3, 2, 2, 2, 2, 14, 2, 1, 2, 2, 2, 2, 14, 13, 5, 13, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 2, 13, 1, 14, 2, 1, 2, 1, 1, 2, 2, 1, 7, 2, 1, 1, 1, 1, 2, 2, 2, 3, 3, 1, 1, 2, 2, 2, 2, 14, 2, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 3, 2, 1, 2, 1, 14, 16, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 3, 2, 1, 2, 2, 14, 13, 3, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 2, 1, 2, 2, 2, 13, 14, 3, 2, 1, 3, 2, 2, 2, 6, 2, 2, 2, 1, 1, 14, 3, 13, 17, 1, 14, 2, 2, 1, 2, 3, 2, 3, 2, 2, 1, 1, 2, 2, 1, 2, 15, 3, 14, 1, 1, 2, 1, 1, 1, 1, 2, 2, 14, 1, 14, 1, 1, 2, 2, 1, 2, 2, 1, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 1, 1, 1, 16, 2, 2, 1, 2, 1, 14, 16, 14, 14, 14, 3, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 1, 1, 3, 14, 13, 2, 14, 14, 1, 3, 1, 2, 14, 2, 1, 1, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 2, 1, 2, 2, 14, 2, 2, 1, 
2, 2, 3, 3, 2, 2, 2, 1, 2, 2, 2, 2, 2, 3, 2, 2, 1, 2, 2, 2, 3, 1, 4, 2, 2, 2, 2, 1, 2, 1, 3, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 13, 1, 1, 2, 1, 13, 1, 2, 2, 2, 2, 2, 16, 1, 2, 1, 2, 1, 1, 2, 2, 3, 2, 1, 2, 1, 2, 1, 2, 1, 1, 2, 3, 2, 1, 2, 2, 1, 2, 2, 3, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 5, 2, 1, 1, 2, 3, 2, 2, 2, 2, 3, 3, 2, 2, 2, 1, 1, 3, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 1, 2, 1, 1, 1, 1, 2, 1, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 1, 2, 1, 1, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 14, 2, 2, 2, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 1, 2, 2, 2, 1, 3, 2, 3, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 2, 2, 17, 2, 2, 2, 3, 2, 1, 2, 2, 2, 2, 1, 2, 1, 1, 1, 2, 2, 3, 1, 1, 2, 2, 2, 2, 2, 2, 2, 14, 3, 1, 13, 2, 2, 1, 2, 1, 1, 2, 2, 1, 1, 6, 2, 1, 1, 2, 3, 1, 2, 7, 2, 2, 2, 1, 1, 1, 2, 2, 13, 2, 1, 2, 14, 1, 1, 1, 2, 3, 1, 2, 2, 2, 2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 1, 2, 1, 2, 6, 1, 2, 2, 2, 1, 1, 2, 1, 2, 5, 1, 2, 1, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 7, 2, 1, 1, 2, 2, 1, 2, 2, 13, 2, 2, 2, 1, 2, 3, 2, 2, 2, 2, 2, 1, 3, 3, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 17, 14, 14, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 1, 1, 1, 2, 2, 1, 2, 3, 1, 1, 2, 2, 2, 15, 2, 2, 1, 1, 16, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 14, 2, 1, 3, 1, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 3, 1, 1, 1, 1, 2, 2, 1, 3, 2, 3, 2, 2, 1, 2, 3, 14, 3, 17, 3, 2, 1, 1, 1, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 3, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 3, 1, 2, 3, 2, 3, 3, 2, 2, 13, 2, 1, 1, 1, 3, 3, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 3, 3, 2, 2, 1, 2, 2, 3, 1, 2, 3, 1, 2, 2, 3, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 3, 3, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 
3, 1, 2, 1, 2, 3, 3, 2, 2, 1, 1, 2, 2, 1, 2, 1, 2, 2, 2, 3, 3, 2, 1, 2, 2, 3, 2, 3, 3, 2, 1, 7, 2, 3, 2, 2, 2, 2, 3, 2, 3, 2, 2, 2, 3, 1, 2, 2, 2, 1, 3, 2, 2, 2, 2, 7, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 3, 2, 1, 2, 1, 2, 3, 2, 1, 2, 2, 2, 17, 2, 1, 1, 2, 1, 2, 2, 3, 2, 2, 3, 1, 1, 1, 2, 2, 2, 2, 1, 3, 3, 3, 2, 2, 2, 2, 1, 1, 3, 2, 2, 2, 2, 3, 2, 1, 7, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 2, 2, 3, 3, 3, 2, 1, 2, 2, 2, 3, 3, 2, 2, 3, 3, 2, 2, 2, 3, 3, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 3, 16, 1, 2, 1, 2, 2, 2, 2, 2, 3, 2, 2, 1, 3, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 3, 2, 1, 2, 1, 2, 1, 1, 1, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 2, 13, 2, 1, 1, 2, 1, 1, 1, 1, 2, 3, 13, 2, 2, 2, 2, 2, 3, 2, 2, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 1, 1, 2, 2, 14, 1, 3, 2, 14, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 13, 2, 3, 2, 2, 2, 2, 2, 2, 17, 1, 1, 3, 15, 1, 2, 2, 2, 2, 14, 2, 2, 2, 14, 2, 1, 2, 2, 2, 3, 1, 2, 2, 2, 2, 1, 2, 1, 13, 1, 2, 1, 2, 2, 16, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 1, 17, 2, 2, 2, 1, 1, 1, 2, 1, 3, 13, 14, 2, 3, 2, 3, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 17, 3, 2, 1, 3, 1, 3, 6, 3, 2, 4, 3, 2, 3, 2, 2, 14, 1, 2, 14, 1, 3, 2, 2, 1, 2, 2, 3, 1, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 1, 14, 2, 1, 2, 2, 3, 2, 3, 3, 3, 2, 3, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 17, 1, 2, 15, 2, 2, 1, 3, 2, 1, 2, 2, 3, 2, 2, 16, 2, 1, 1, 2, 2, 2, 3, 1, 1, 2, 3, 2, 2, 2, 2, 3, 1, 2, 2, 3, 2, 3, 16, 2, 2, 3, 3, 3, 1, 1, 2, 2, 3, 3, 2, 2, 2, 3, 2, 2, 3, 2, 15, 14, 3, 2, 2, 2, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 3, 3, 2, 2, 3, 7, 3, 2, 2, 2, 2, 3, 2, 2, 5, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 3, 3, 3, 3, 3, 4, 2, 2, 2, 2, 2, 3, 2, 3, 3, 2, 3, 2, 13, 2, 3, 3, 14, 3, 2, 3, 2, 13, 3, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 
2, 2, 1, 2, 2, 3, 2, 3, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 3, 2, 1, 3, 3, 2, 5, 2, 2, 3, 3, 2, 3, 1, 3, 3, 2, 3, 2, 2, 3, 1, 1, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 2, 2, 2, 3, 2, 3, 3, 3, 3, 2, 2, 2, 3, 2, 2, 3, 2, 2, 3, 3, 3, 3, 3, 2, 3, 3, 3, 15, 2, 2, 2, 3, 3, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 2, 2, 3, 2, 3, 3, 7, 1, 2, 2, 2, 3, 3, 2, 3, 2, 2, 2, 2, 3, 2, 3, 2, 2, 13, 2, 3, 3, 2, 2, 2, 2, 2, 2, 3, 3, 3, 2, 2, 16, 2, 2, 2, 2, 2, 3, 2, 3, 3, 2, 2, 13, 2, 3, 2, 2, 2, 2, 2, 3, 3, 14, 2, 3, 2, 3, 3, 2, 3, 2, 2, 3, 13, 2, 2, 3, 2, 2, 2, 1, 2, 2, 3, 1, 14, 2, 2, 2, 2, 1, 2, 2, 1, 3, 2, 2, 2, 3, 3, 3, 2, 1, 2, 2, 14, 3, 3, 2, 3, 2, 2, 3, 2, 2, 16, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 13, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 2, 15, 2, 2, 15, 14, 13, 2, 3, 2, 2, 3, 3, 2, 2, 16, 2, 2, 2, 2, 2, 2, 3, 2, 14, 2, 2, 14, 13, 2, 2, 2, 3, 2, 14, 2, 2, 2, 1, 2, 13, 2, 2, 2, 2, 2, 3, 13, 2, 2, 2, 2, 2, 2, 2, 2, 13, 13, 6, 2, 3, 2, 14, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 13, 2, 2, 16, 2, 13, 13, 1, 2, 2, 3, 2, 2, 3, 3, 2, 2, 2, 2, 1, 3, 2, 3, 3, 3, 6, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 3, 2, 2, 3, 2, 3, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 3, 2, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 3, 2, 3, 2, 2, 2, 2, 3, 2, 2, 2, 3, 3, 17, 2, 3, 3, 3, 3, 2, 2, 2, 2, 3, 2, 2, 2, 2, 13, 3, 2, 2, 3, 2, 2, 3, 2, 14, 2, 2, 17, 17, 2, 2, 1, 2, 3, 1, 2, 1, 1, 15, 2, 2, 2, 3, 13, 2, 2, 13, 2, 2, 2, 3, 3, 2, 3, 2, 2, 13, 7, 3, 2, 3, 2, 14, 2, 3, 2, 2, 2, 13, 2, 3, 3, 2, 14, 14, 2, 2, 2, 3, 2, 2, 2, 2, 13, 2, 2, 2, 13, 2, 2, 2, 2, 2, 3, 2, 13, 2, 2, 2, 2, 2, 2, 14, 3, 2, 2, 2, 2, 17, 1, 3, 2, 2, 2, 13, 2, 3, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 2, 3, 2, 13, 2, 2, 2, 3, 3, 3, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 16, 2, 2, 4, 2, 2, 16, 16, 2, 2, 3, 2, 2, 2, 3, 13, 2, 2, 3, 2, 2, 2, 1, 2, 2, 3, 2, 3, 3, 2, 2, 2, 2, 2, 3, 3, 2, 4, 2, 2, 4, 3, 2, 2, 3, 2, 3, 2, 3, 3, 3, 2, 2, 3, 14, 2, 2, 2, 4, 2, 3, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 15, 3, 14, 2, 2, 2, 3, 2, 2, 1, 
3, 3, 2, 3, 2, 2, 3, 3, 14, 3, 2, 13, 2, 2, 2, 3, 2, 3, 2, 2, 3, 2, 3, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 3, 2, 2, 13, 3, 2, 2, 3, 2, 3, 2, 2, 3, 3, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 13, 2, 13, 2, 1, 1, 14, 3, 2, 2, 2, 1, 13, 2, 2, 13, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 14, 2, 2, 13, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 14, 2, 2, 13, 2, 2, 2, 3, 2, 2, 2, 2, 3, 1, 1, 2, 3, 2, 2, 2, 2, 3, 3, 2, 13, 2, 14, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 1, 2, 2, 2, 3, 2, 2, 1, 1, 1, 3, 2, 3, 2, 2, 3, 2, 3, 2, 3, 2, 2, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 2, 1, 2, 1, 3, 2, 2, 1, 2, 2, 2, 2, 13, 13, 2, 2, 1, 1, 2, 1, 3, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 1, 2, 2, 3, 3, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 1, 2, 1, 2, 2, 2, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 2, 3, 2, 3, 3, 2, 1, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 3, 1, 1, 1, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 2, 3, 2, 3, 2, 2, 14, 15, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 2, 1, 17, 2, 2, 1, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 14, 2, 2, 2, 13, 2, 3, 2, 2, 1, 2, 13, 1, 2, 1, 2, 2, 1, 2, 2, 13, 2, 3, 3, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 14, 2, 17, 13, 1, 3, 2, 3, 2, 1, 3, 3, 2, 2, 2, 2, 2, 13, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 14, 13, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 1, 2, 3, 2, 16, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 3, 2, 2, 1, 
2, 2, 2, 2, 2, 3, 1, 13, 1, 2, 1, 2, 2, 13, 1, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 3, 14, 3, 2, 1, 2, 1, 2, 2, 1, 1, 1, 2, 2, 1, 2, 2, 2, 1, 2, 3, 1, 2, 3, 2, 1, 1, 2, 13, 2, 2, 2, 2, 3, 3, 2, 2, 1, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 3, 16, 2, 2, 1, 2, 2, 2, 2, 3, 1, 3, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 14, 2, 2, 2, 2, 1, 2, 2, 1, 2, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 13, 2, 2, 2, 1, 2, 2, 2, 1, 2, 1, 2, 2, 2, 1, 2, 3, 2, 1, 2, 2, 1, 2, 2, 3, 2, 2, 2, 1, 2, 2, 2, 2, 2, 3, 1, 2, 2, 3, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 1, 1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 13, 2, 2, 2, 2, 2, 1, 14, 2, 2, 2, 13, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 13, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 13, 2, 2, 2, 14, 1, 2, 2, 2, 1, 2, 3, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 13, 13, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 3, 3, 2, 3, 2, 2, 2, 2, 2, 3, 2, 2, 3, 2, 2, 2, 2, 2, 3, 2, 1, 1, 2, 2, 1, 2, 1, 1, 3, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 14, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 1, 2, 1, 2, 2, 2, 13, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 1, 3, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2, 1, 1, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 14, 1, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 1, 1, 13, 1, 1, 1, 2, 1, 1, 1, 2, 1, 2, 2, 2, 1, 1, 2, 1, 1, 13, 7, 2, 1, 1, 2, 1, 1, 1, 2, 2, 7, 3, 1, 1, 1, 3, 2, 2, 2, 3, 2, 2, 2, 1, 2, 2, 2, 2, 13, 3, 7, 2, 7, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 3, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 2, 14, 5, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 4, 2, 2, 1, 2, 1, 2, 2, 2, 17, 1, 1, 1, 1, 1, 2, 3, 2, 2, 1, 1, 2, 2, 1, 1, 1, 2, 1, 1, 1, 6, 1, 2, 
3, 2, 2, 2, 3, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 7, 17, 1, 3, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 1, 1, 1, 1, 2, 14, 2, 2, 1, 7, 14, 1, 2, 1, 1, 3, 2, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 3, 1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 3, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 13, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 1, 1, 7, 1, 1, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 2, 4, 3, 2, 3, 1, 1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 1, 2, 3, 2, 3, 2, 2, 2, 2, 2, 1, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 3, 1, 1, 2, 2, 2, 1, 1, 2, 7, 7, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 7, 2, 1, 2, 1, 2, 3, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 3, 2, 2, 3, 2, 2, 1, 2, 3, 2, 2, 2, 2, 3, 1, 2, 2, 1, 2, 1, 1, 3, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 17, 2, 14, 2, 2, 2, 2, 14, 2, 2, 2, 3, 2, 1, 2, 2, 2, 2, 3, 3, 1, 3, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 2, 2, 2, 2, 1, 2, 3, 1, 7, 2, 3, 2, 2, 2, 2, 2, 7, 2, 2, 3, 2, 4, 2, 2, 3, 2, 3, 2, 2, 3, 7, 2, 2, 2, 5, 3, 2, 2, 2, 2, 2, 2, 14, 3, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 3, 2, 1, 2, 1, 2, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 2, 1, 2, 2, 1, 1, 1, 1, 1, 2, 1, 1, 7, 1, 1, 2, 2, 17, 2, 2, 1, 1, 2, 17, 2, 1, 13, 1, 17, 7, 2, 1, 2, 1, 13, 2, 1, 2, 2, 2, 1, 1, 2, 14, 2, 2, 17, 1, 1, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 3, 14, 2, 7, 1, 4, 2, 17, 13, 7, 2, 1, 2, 2, 2, 1, 2, 1, 1, 7, 13, 7, 1, 2, 13, 1, 2, 2, 2, 7, 1, 2, 2, 2, 14, 1, 2, 2, 1, 7, 2, 1, 2, 2, 2, 2, 2, 14, 2, 3, 1, 1, 2, 2, 14, 2, 2, 2, 7, 2, 17, 1, 14, 2, 2, 2, 2, 17, 2, 3, 3, 2, 1, 2, 2, 1, 1, 2, 2, 2, 1, 1, 2, 7, 7, 13, 1, 2, 2, 2, 3, 1, 3, 2, 5, 14, 7, 2, 1, 1, 7, 1, 1, 7, 1, 1, 2, 2, 17, 1, 2, 7, 7, 2, 1, 1, 13, 7, 1, 2, 1, 1, 1, 2, 2, 1, 17, 7, 1, 1, 1, 2, 7, 1, 1, 17, 1, 17, 1, 2, 1, 1, 1, 2, 13, 13, 2, 15, 7, 2, 7, 7, 2, 2, 7, 2, 1, 0, 2, 2, 14, 2, 
1, 2, 1, 2, 1, 14, 14, 3, 3, 17, 2, 1, 1, 1, 1, 1, 1, 3, 2, 1, 14, 2, 1, 2, 1, 1, 3, 3, 1, 2, 1, 2, 1, 3, 3, 2, 1, 2, 2, 3, 2, 2, 3, 2, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 13, 14, 1, 3, 2, 17, 1, 2, 2, 2, 13, 2, 1, 2, 2, 1, 2, 1, 2, 1, 2, 15, 2, 2, 1, 1, 2, 2, 4, 2, 1, 2, 1, 2, 2, 3, 2, 2, 2, 2, 1, 2, 2, 3, 2, 3, 1, 2, 2, 2, 1, 2, 7, 2, 1, 1, 3, 2, 14, 2, 2, 2, 2, 3, 1, 1, 1, 13, 1, 2, 2, 3, 14, 12, 14, 2, 2, 1, 2, 1, 1, 15, 1, 2, 3, 2, 3, 14, 1, 2, 2, 1, 2, 7, 2, 1, 2, 1, 1, 2, 2, 1, 2, 13, 3, 2, 2, 1, 2, 1, 2, 3, 2, 3, 1, 1, 3, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 13, 2, 1, 12, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 14, 1, 2, 3, 2, 3, 2, 1, 2, 2, 2, 2, 2, 2, 3, 3, 2, 2, 13, 3, 3, 3, 2, 3, 2, 1, 3, 1, 2, 2, 7, 2, 1, 2, 1, 2, 2, 3, 2, 14, 3, 2, 2, 1, 2, 2, 2, 3, 2, 2, 2, 3, 1, 1, 2, 14, 1, 2, 1, 1, 2, 2, 2, 2, 2, 15, 2, 3, 3, 2, 2, 3, 1, 1, 2, 2, 2, 3, 2, 14, 2, 1, 3, 3, 2, 2, 3, 2, 2, 2, 3, 4, 2, 2, 2, 12, 15, 2, 14, 14, 1, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 1, 2, 3, 7, 2, 2, 13, 2, 13, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 3, 2, 1, 13, 2, 3, 14, 3, 2, 1, 14, 3, 2, 2, 1, 3, 2, 2, 2, 2, 2, 2, 2, 2, 3, 1, 3, 2, 2, 3, 2, 1, 14, 2, 14, 2, 2, 3, 2, 2, 1, 2, 2, 2, 2, 14, 2, 2, 2, 2, 1, 3, 2, 1, 1, 2, 2, 2, 2, 14, 2, 2, 14, 13, 2, 1, 3, 2, 1, 2, 3, 1, 1, 3, 2, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2, 2, 7, 2, 1, 2, 2, 7, 2, 2, 1, 14, 1, 3, 13, 2, 2, 1, 15, 2, 2, 2, 2, 2, 2, 1, 1, 13, 2, 2, 4, 2, 3, 2, 3, 1, 2, 2, 2, 2, 1, 2, 1, 1, 1, 1, 1, 3, 1, 1, 3, 14, 3, 2, 1, 2, 2, 2, 1, 2, 1, 2, 13, 1, 2, 2, 2, 2, 1, 2, 14, 1, 2, 2, 1, 2, 1, 1, 1, 3, 2, 2, 2, 13, 2, 1, 2, 13, 2, 2, 2, 3, 1, 1, 1, 1, 1, 14, 3, 1, 2, 2, 3, 2, 3, 2, 1, 2, 1, 14, 2, 1, 2, 1, 2, 2, 2, 2, 13, 14, 2, 13, 1, 2, 2, 2, 2, 4, 1, 1, 2, 14, 1, 2, 1, 3, 2, 1, 1, 1, 2, 2, 2, 6, 2, 2, 2, 14, 2, 2, 14, 2, 2, 1, 1, 1, 1, 2, 17, 2, 1, 2, 1, 3, 12, 1, 2, 2, 2, 1, 3, 1, 1, 2, 1, 2, 1, 2, 1, 1, 1, 14, 2, 13, 13, 1, 2, 13, 1, 1, 1, 2, 14, 1, 3, 3, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 13, 1, 1, 1, 12, 2, 2, 2, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 2, 6, 1, 13, 2, 1, 13, 13, 2, 1, 1, 1, 1, 1, 13, 3, 2, 2, 2, 2, 1, 2, 1, 2, 6, 1, 1, 3, 1, 17, 1, 12, 2, 3, 2, 1, 2, 1, 1, 1, 2, 3, 13, 3, 3, 3, 2, 1, 13, 1, 1, 14, 7, 2, 17, 1, 1, 1, 1, 14, 1, 12, 2, 7, 1, 12, 2, 2, 1, 2, 14, 13, 1, 3, 13, 2, 1, 2, 1, 2, 2, 1, 14, 2, 3, 1, 1, 17, 1, 1, 13, 1, 2, 2, 1, 13, 1, 3, 17, 1, 1, 1, 2, 13, 1, 1, 14, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 2, 17, 1, 2, 1, 17, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 3, 1, 13, 1, 1, 2, 1, 2, 1, 1, 1, 2, 1, 16, 6, 2, 1, 2, 1, 1, 17, 1, 3, 1, 2, 2, 2, 1, 2, 1, 2, 1, 3, 1, 2, 2, 17, 2, 1, 3, 1, 2, 1, 1, 12, 13, 2, 2, 2, 1, 1, 2, 2, 1, 2, 1, 2, 2, 1, 12, 13, 3, 1, 1, 1, 2, 17, 2, 2, 1, 1, 1, 1, 1, 12, 1, 2, 2, 1, 1, 1, 13, 1, 1, 1, 1, 13, 1, 14, 1, 1, 1, 1, 1, 1, 2, 2, 13, 1, 2, 1, 2, 2, 2, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 1, 2, 1, 1, 1, 1, 2, 14, 17, 2, 2, 2, 17, 1, 2, 2, 13, 1, 1, 2, 13, 2, 2, 1, 1, 2, 1, 1, 2, 1, 2, 2, 1, 1, 1, 1, 1, 1, 17, 1, 17, 3, 3, 2, 2, 1, 13, 1, 13, 12, 2, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 2, 2, 12, 1, 17, 2, 12, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 7, 2, 2, 1, 2, 2, 1, 3, 2, 1, 1, 1, 2, 2, 17, 3, 1, 2, 2, 2, 1, 3, 17, 2, 1, 2, 1, 2, 1, 2, 1, 1, 1, 2, 12, 17, 1, 1, 2, 1, 1, 1, 2, 13, 2, 2, 1, 1, 15, 2, 17, 1, 14, 1, 1, 1, 3, 1, 2, 2, 2, 2, 2, 3, 1, 1, 2, 1, 1, 1, 2, 1, 13, 7, 1, 1, 17, 2, 14, 2, 1, 1, 2, 4, 1, 2, 1, 1, 14, 2, 2, 1, 2, 17, 17, 2, 1, 1, 2, 17, 17, 17, 2, 1, 1, 2, 3, 2, 2, 13, 3, 13, 1, 17, 17, 17, 17, 2, 7, 2, 1, 17, 1, 2, 1, 15, 3, 13, 1, 3, 1, 2, 12, 15, 3, 2, 1, 1, 3, 2, 1, 2, 1, 2, 17, 2, 1, 3, 1, 1, 3, 1, 1, 1, 2, 15, 17, 1, 13, 12, 2, 2, 1, 1, 13, 1, 1, 1, 17, 1, 1, 1, 2, 12, 2, 1, 1, 1, 1, 17, 1, 17, 1, 2, 12, 17, 17, 2, 3, 14, 15, 2, 14, 17, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 14, 1, 1, 17, 2, 2, 12, 1, 1, 1, 1, 1, 1, 1, 14, 17, 1, 1, 2, 1, 17, 3, 1, 1, 2, 13, 3, 1, 1, 1, 1, 1, 13, 1, 1, 1, 13, 1, 2, 1, 3, 2, 13, 7, 1, 14, 14, 14, 13, 17, 13, 1, 3, 1, 17, 17, 13, 1, 1, 1, 12, 17, 1, 1, 13, 15, 1, 2, 1, 1, 1, 1, 2, 1, 1, 12, 12, 13, 1, 1, 2, 12, 12, 
1, 3, 17, 2, 12, 17, 1, 1, 3, 2, 1, 3, 12, 2, 2, 3, 2, 14, 14, 1, 1, 2, 12, 1, 12, 12, 14, 1, 1, 1, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 1, 2, 13, 13, 17, 2, 1, 1, 1, 2, 1, 15, 2, 17, 2, 2, 1, 1, 2, 1, 14, 2, 1, 2, 1, 1, 3, 2, 3, 1, 2, 3, 2, 1, 2, 17, 1, 2, 2, 1, 2, 2, 3, 15, 1, 3, 13, 3, 1, 1, 2, 3, 14, 1, 13, 2, 2, 1, 14, 2, 1, 2, 2, 1, 17, 12, 12, 1, 1, 2, 1, 1, 1, 2, 2, 13, 13, 1, 17, 1, 2, 17, 1, 1, 1, 1, 1, 1, 2, 14, 1, 1, 3, 13, 12, 12, 2, 1, 1, 2, 2, 3, 3, 1, 3, 1, 3, 1, 1, 2, 2, 13, 1, 2, 2, 12, 12, 2, 2, 12, 17, 1, 17, 2, 12, 17, 17, 1, 17, 12, 17, 1, 17, 2, 1, 13, 2, 2, 1, 1, 2, 2, 1, 1, 17, 2, 3, 17, 3, 1, 1, 1, 2, 1, 12, 3, 2, 2, 12, 17, 17, 12, 12, 13, 2, 3, 17, 2, 1, 3, 2, 2, 3, 17, 17, 12, 2, 3, 2, 2, 2, 3, 1, 3, 2, 1, 12, 2, 2, 3, 2, 3, 1, 2, 2, 17, 12, 7, 12, 12, 1, 1, 1, 2, 2, 1, 1, 1, 12, 13, 17, 17, 1, 17, 1, 1, 1, 1, 13, 1, 2, 17, 2, 1, 2, 2, 2, 2, 2, 12, 3, 1, 1, 2, 2, 2, 2, 1, 1, 13, 14, 1, 1, 2, 2, 2, 13, 17, 2, 1, 2, 15, 2, 2, 1, 2, 13, 1, 2, 1, 2, 17, 2, 1, 2, 2, 1, 6, 13, 1, 1, 1, 1, 2, 1, 1, 2, 1, 17, 2, 2, 13, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 2, 2, 3, 1, 1, 17, 1, 3, 1, 2, 17, 2, 2, 1, 1, 2, 13, 1, 1, 1, 1, 1, 2, 2, 1, 2, 13, 13, 3, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 1, 2, 1, 1, 1, 2, 1, 2, 2, 2, 1, 2, 2, 13, 2, 1, 1, 1, 2, 14, 5, 2, 2, 1, 2, 1, 1, 2, 2, 2, 2, 13, 1, 2, 2, 2, 2, 2, 2, 1, 1, 3, 2, 4, 2, 2, 1, 15, 2, 2, 1, 12, 1, 3, 1, 14, 1, 2, 1, 1, 1, 2, 2, 1, 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 2, 2, 2, 1, 1, 2, 1, 2, 3, 3, 2, 2, 1, 2, 2, 1, 17, 1, 2, 15, 2, 1, 13, 2, 1, 1, 3, 2, 1, 14, 2, 1, 2, 2, 1, 2, 14, 1, 2, 16, 2, 2, 2, 1, 1, 5, 1, 2, 1, 3, 2, 2, 2, 2, 13, 2, 5, 2, 1, 2, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 2, 3, 2, 13, 3, 2, 2, 2, 2, 1, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 1, 12, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 3, 2, 2, 1, 13, 1, 1, 1, 1, 3, 1, 15, 1, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1, 13, 2, 14, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 3, 1, 2, 2, 17, 13, 1, 2, 1, 2, 1, 2, 1, 1, 2, 1, 2, 2, 1, 2, 2, 1, 17, 2, 1, 1, 3, 2, 2, 1, 2, 1, 1, 2, 2, 1, 2, 1, 1, 13, 2, 12, 
2, 1, 15, 2, 1, 3, 1, 1, 13, 1, 2, 1, 7, 2, 1, 1, 2, 1, 14, 13, 1, 2, 1, 17, 2, 1, 1, 2, 2, 1, 1, 2, 2, 1, 2, 13, 2, 1, 3, 1, 1, 12, 1, 17, 1, 1, 1, 13, 1, 2, 1, 1, 1, 1, 2, 16, 1, 6, 1, 3, 1, 2, 1, 1, 12, 1, 12, 1, 1, 1, 1, 1, 1, 6, 1, 1, 17, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 13, 1, 1, 1, 1, 12, 1, 1, 12, 17, 1, 1, 1, 1, 1, 2, 1, 1, 12, 1, 12, 1, 1, 1, 1, 13, 1, 1, 1, 1, 12, 2, 13, 1, 1, 17, 1, 1, 1, 1, 17, 1, 1, 12, 2, 1, 1, 1, 1, 1, 1, 12, 1, 1, 2, 13, 1, 1, 17, 1, 1, 13, 1, 12, 1, 1, 15, 2, 14, 12, 1, 1, 12, 1, 6, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 15, 1, 1, 12, 1, 1, 2, 14, 1, 1, 1, 1, 12, 1, 13, 15, 13, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 12, 1, 2, 1, 1, 1, 1, 2, 2, 1, 13, 1, 1, 1, 1, 13, 1, 1, 1, 1, 2, 1, 12, 1, 12, 7, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 13, 12, 13, 1, 1, 1, 15, 1, 17, 1, 1, 12, 1, 1, 1, 1, 1, 1, 1, 1, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 13, 1, 17, 1, 1, 1, 15, 12, 1, 14, 1, 1, 14, 1, 1, 1, 1, 1, 1, 1, 1, 13, 1, 13, 1, 7, 1, 12, 14, 1, 13, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 14, 1, 1, 15, 1, 1, 12, 1, 13, 1, 1, 1, 1, 14, 1, 13, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 1, 1, 12, 1, 1, 1, 13, 15, 13, 1, 1, 12, 1, 1, 12, 1, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 1, 17, 16, 1, 1, 1, 1, 1, 1, 1, 12, 14, 1, 13, 1, 12, 1, 14, 1, 1, 1, 1, 1, 1, 14, 2, 1, 1, 1, 1, 1, 1, 1, 1, 12, 12, 1, 1, 1, 1, 1, 1, 14, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 7, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 12, 13, 1, 14, 1, 13, 1, 1, 12, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 13, 13, 1, 1, 1, 1, 1, 1, 1, 12, 1, 1, 1, 1, 1, 17, 6, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 13, 1, 1, 13, 12, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 12, 1, 1, 1, 1, 1, 1, 12, 1, 12, 1, 1, 1, 1, 15, 1, 6, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 6, 1, 1, 2, 1, 2, 7, 1, 17, 2, 1, 1, 1, 1, 1, 2, 12, 2, 2, 2, 1, 14, 2, 17, 15, 1, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 15, 17, 7, 2, 17, 17, 2, 1, 3, 1, 2, 2, 17, 13, 14, 1, 2, 1, 1, 2, 14, 2, 13, 1, 1, 1, 15, 7, 1, 2, 1, 1, 2, 1, 3, 1, 1, 17, 1, 13, 17, 13, 17, 3, 1, 1, 14, 2, 2, 2, 2, 14, 2, 2, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 17, 1, 1, 17, 1, 2, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 2, 3, 1, 17, 1, 1, 13, 17, 1, 2, 1, 2, 2, 1, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 13, 1, 12, 1, 17, 1, 2, 1, 3, 1, 1, 2, 2, 1, 1, 2, 1, 17, 14, 1, 2, 2, 2, 1, 17, 17, 13, 1, 1, 1, 2, 17, 2, 2, 2, 1, 2, 2, 17, 1, 2, 1, 2, 1, 2, 2, 1, 2, 1, 1, 2, 1, 1, 2, 2, 1, 3, 1, 1, 14, 1, 1, 1, 14, 1, 1, 1, 1, 1, 13, 17, 2, 1, 2, 2, 1, 2, 17, 1, 1, 1, 1, 14, 1, 2, 1, 1, 17, 2, 1, 1, 1, 1, 17, 1, 1, 1, 2, 1, 2, 1, 2, 2, 2, 7, 1, 1, 17, 17, 2, 2, 1, 13, 2, 1, 1, 1, 1, 1, 1, 7, 17, 1, 2, 1, 1, 1, 2, 1, 1, 2, 1, 1, 1, 2, 1, 1, 13, 1, 1, 2, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 2, 1, 12, 1, 3, 2, 2, 2, 1, 3, 2, 15, 1, 2, 1, 3, 13, 1, 1, 2, 1, 1, 1, 7, 7, 1, 2, 1, 1, 1, 1, 1, 15, 14, 1, 2, 17, 1, 3, 2, 12, 13, 1, 13, 17, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 7, 17, 1, 2, 15, 1, 2, 7, 1, 1, 2, 1, 17, 1, 1, 17, 2, 13, 1, 1, 1, 2, 2, 13, 1, 1, 1, 1, 1, 17, 1, 2, 2, 14, 1, 13, 1, 1, 2, 1, 1, 1, 13, 1, 2, 1, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 2, 2, 1, 2, 3, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 17, 1, 2, 1, 1, 3, 1, 1, 2, 2, 3, 1, 1, 2, 2, 1, 2, 1, 2, 2, 3, 1, 2, 1, 3, 2, 2, 1, 1, 1, 13, 1, 1, 1, 1, 2, 1, 1, 1, 3, 3, 1, 1, 1, 2, 1, 1, 2, 2, 2, 2, 1, 1, 13, 1, 1, 7, 2, 2, 1, 2, 13, 2, 1, 1, 2, 1, 1, 15, 1, 2, 12, 2, 1, 13, 1, 1, 13, 1, 1, 1, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 1, 1, 2, 13, 2, 1, 1, 1, 2, 1, 2, 1, 13, 13, 2, 1, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 13, 2, 1, 1, 2, 15, 1, 2, 1, 1, 2, 1, 1, 13, 1, 1, 2, 13, 1, 1, 2, 1, 1, 2, 1, 1, 1, 2, 1, 13, 2, 2, 
2, 15, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 13, 2, 1, 1, 7, 2, 2, 1, 3, 2, 2, 13, 1, 13, 1, 15, 1, 1, 2, 1, 1, 1, 1, 2, 1, 3, 2, 2, 2, 1, 2, 15, 2, 1, 1, 2, 1, 1, 1, 2, 1, 2, 3, 1, 13, 2, 2, 15, 2, 13, 1, 13, 2, 2, 2, 3, 1, 2, 1, 15, 2, 13, 1, 2, 1, 1, 1, 2, 2, 1, 2, 1, 1, 1, 1, 7, 2, 2, 1, 1, 2, 15, 3, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 2, 2, 17, 2, 2, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 2, 1, 2, 2, 1, 2, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 3, 13, 1, 1, 1, 1, 1, 2, 1, 1, 13, 2, 1, 1, 2, 1, 1, 1, 15, 1, 1, 2, 1, 1, 2, 1, 14, 1, 14, 2, 13, 1, 2, 2, 2, 2, 2, 1, 2, 13, 1, 2, 1, 2, 1, 1, 1, 2, 1, 2, 14, 1, 1, 1, 2, 3, 1, 2, 1, 2, 2, 1, 1, 1, 1, 2, 1, 12, 1, 13, 12, 3, 14, 1, 1, 1, 1, 1, 2, 14, 1, 1, 1, 1, 2, 1, 1, 2, 1, 2, 1, 1, 3, 2, 1, 1, 1, 1, 17, 2, 2, 1, 2, 1, 1, 1, 2, 3, 3, 3, 1, 2, 2, 2, 3, 2, 3, 1, 1, 1, 2, 1, 1, 1, 2, 1, 2, 2, 1, 1, 1, 2, 1, 3, 2, 1, 1, 2, 1, 1, 1, 1, 3, 1, 1, 2, 3, 2, 3, 3, 2, 2, 1, 1, 2, 2, 2, 1, 1, 1, 2, 2, 1, 2, 1, 13, 1, 1, 3, 3, 2, 3, 12, 13, 1, 2, 1, 1, 13, 1, 13, 14, 2, 1, 2, 3, 3, 1, 1, 1, 1, 1, 13, 1, 1, 2, 1, 1, 1, 1, 1, 13, 1, 1, 15, 13, 1, 2, 1, 2, 1, 1, 2, 1, 1, 1, 1, 13, 2, 1, 1, 2, 2, 1, 13, 14, 1, 1, 2, 1, 1, 13, 2, 12, 13, 13, 2, 13, 1, 1, 1, 13, 1, 2, 1, 1, 1, 1, 2, 1, 13, 2, 1, 1, 1, 13, 13, 1, 2, 17, 2, 1, 1, 1, 1, 14, 1, 1, 2, 1, 12, 2, 1, 2, 1, 17, 1, 1, 1, 13, 1, 2, 17, 1, 2, 13, 1, 1, 14, 17, 1, 13, 2, 1, 2, 1, 1, 1, 12, 1, 17, 3, 2, 5, 13, 1, 1, 1, 7, 13, 1, 1, 2, 2, 1, 1, 2, 1, 1, 1, 2, 1, 1, 1, 1, 2, 2, 2, 1, 2, 2, 1, 13, 2, 1, 1, 14, 1, 2, 2, 1, 1, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 17, 1, 1, 1, 2, 1, 13, 1, 2, 1, 17, 14, 1, 1, 1, 2, 1, 12, 1, 13, 1, 2, 1, 1, 1, 2, 13, 1, 3, 12, 17, 1, 2, 1, 1, 1, 12, 2, 12, 13, 1, 17, 2, 2, 2, 1, 1, 1, 7, 1, 1, 2, 2, 1, 1, 2, 2, 1, 2, 13, 1, 1, 1, 15, 2, 17, 1, 2, 13, 1, 1, 13, 17, 1, 1, 1, 2, 14, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 14, 1, 17, 1, 2, 2, 2, 2, 14, 17, 17, 17, 17, 2, 17, 1, 1, 2, 2, 13, 12, 1, 13, 1, 1, 2, 1, 2, 13, 12, 2, 1, 
1, 17, 1, 1, 1, 1, 3, 1, 17, 1, 1, 2, 1, 2, 1, 1, 17, 2, 1, 17, 14, 17, 1, 2, 1, 14, 1, 3, 1, 13, 2, 1, 1, 2, 17, 1, 13, 1, 1, 17, 1, 1, 1, 13, 2, 14, 17, 1, 2, 17, 2, 2, 17, 17, 1, 17, 17, 14, 17, 1, 1, 2, 14, 17, 1, 2, 2, 17, 1, 17, 1, 17, 2, 17, 17, 13, 17, 17, 1, 2, 1, 17, 1, 13, 17, 13, 17, 2, 13, 17, 17, 1, 17, 17, 2, 1, 13, 1, 2, 17, 14, 2, 1, 1, 2, 17, 1, 2, 2, 17, 12, 17, 2, 17, 13, 1, 2, 13, 2, 14, 2, 3, 17, 15, 14, 1, 12, 1, 2, 1, 2, 1, 17, 2, 13, 13, 1, 1, 17, 2, 1, 1, 1, 1, 2, 12, 14, 14, 14, 1, 2, 17, 13, 15, 1, 1, 13, 1, 14, 14, 2, 14, 1, 2, 1, 2, 2, 2, 1, 1, 16, 17, 1, 2, 12, 15, 1, 1, 1, 2, 2, 17, 2, 13, 17, 17, 1, 17, 17, 17, 12, 1, 17, 1, 7, 17, 17, 2, 2, 1, 1, 17, 17, 1, 2, 2, 17, 2, 17, 2, 14, 17, 1, 12, 17, 7, 17, 2, 13, 13, 1, 1, 17, 1, 17, 17, 17, 1, 7, 17, 1, 13, 1, 2, 2, 13, 13, 1, 12, 1, 1, 12, 12, 12, 12, 17, 1, 12, 2, 3, 14, 14, 2, 2, 15, 1, 1, 17, 2, 13, 13, 2, 14, 2, 5, 2, 1, 1, 14, 2, 1, 1, 13, 2, 3, 1, 1, 12, 1, 13, 1, 1, 2, 1, 13, 1, 2, 1, 12, 12, 1, 1, 2, 2, 1, 2, 2, 14, 14, 2, 2, 2, 2, 1, 2, 2, 15, 13, 6, 1, 1, 1, 1, 12, 2, 12, 14, 1, 1, 1, 1, 12, 1, 2, 2, 1, 12, 1, 1, 2, 1, 1, 1, 12, 1, 1, 12, 15, 1, 14, 1, 1, 1, 2, 13, 14, 1, 1, 6, 1, 12, 1, 1, 15, 1, 2, 2, 12, 12, 1, 1, 2, 2, 12, 1, 1, 1, 1, 1, 13, 1, 1, 12, 1, 1, 2, 2, 17, 12, 2, 15, 1, 1, 14, 2, 1, 17, 1, 13, 1, 12, 12, 2, 1, 1, 1, 1, 1, 15, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 1, 13, 1, 1, 1, 12, 1, 15, 2, 12, 1, 15, 1, 1, 1, 1, 1, 2, 2, 2, 14, 1, 2, 3, 1, 2, 1, 2, 2, 1, 1, 2, 1, 1, 1, 1, 1, 1, 3, 1, 2, 2, 1, 1, 1, 2, 15, 1, 2, 15, 2, 12, 2, 13, 1, 1, 17, 13, 1, 12, 2, 1, 1, 1, 1, 1, 14, 1, 12, 2, 1, 1, 5, 2, 2, 13, 13, 14, 1, 15, 2, 2, 2, 13, 2, 1, 2, 5, 1, 1, 1, 2, 1, 1, 1, 1, 2, 2, 1, 3, 1, 1, 2, 1, 2, 2, 15, 12, 1, 1, 1, 1, 17, 1, 2, 2, 3, 1, 1, 2, 2, 2, 17, 1, 1, 1, 1, 12, 1, 2, 3, 17, 1, 1, 1, 3, 1, 15, 1, 12, 12, 2, 2, 2, 1, 1, 2, 1, 1, 1, 14, 2, 2, 15, 1, 2, 2, 1, 1, 15, 1, 1, 1, 1, 1, 2, 1, 1, 17, 2, 1, 12, 1, 2, 1, 1, 3, 1, 1, 1, 14, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 2, 
17, 1, 1, 1, 1, 1, 12, 12, 2, 1, 1, 1, 1, 13, 2, 13, 2, 1, 2, 1, 2, 1, 1, 6, 1, 2, 1, 1, 14, 2, 15, 1, 6, 1, 2, 2, 1, 6, 1, 1 }; var interval = Interval1D.Create(0, feature.Length); Array.Sort(feature, targets); @@ -123,9 +122,6 @@ public void OnlyUniqueThresholdsSplitSearcher_FindBestSplit_Large() static double Weight(double v, double weight) { - if (v == 1.0) - return weight; - return 1.0; + return v == 1.0 ? weight : 1.0; } - } diff --git a/src/SharpLearning.DecisionTrees.Test/TreeBuilders/BestFirstTreeBuilderTest.cs b/src/SharpLearning.DecisionTrees.Test/TreeBuilders/BestFirstTreeBuilderTest.cs index 999c6f8d..e7470aad 100644 --- a/src/SharpLearning.DecisionTrees.Test/TreeBuilders/BestFirstTreeBuilderTest.cs +++ b/src/SharpLearning.DecisionTrees.Test/TreeBuilders/BestFirstTreeBuilderTest.cs @@ -30,7 +30,6 @@ public void BestFirstTreeBuilder_InvalidMaximumLeafCount() new GiniClassificationImpurityCalculator()); } - [TestMethod] [ExpectedException(typeof(ArgumentException))] public void BestFirstTreeBuilder_InvalidFeaturesPrSplit() @@ -67,7 +66,6 @@ public void BestFirstTreeBuilder_Build_Full_Tree() Assert.AreEqual(0.0, actual, 0.00001); } - [TestMethod] public void BestFirstTreeBuilder_Build_Leaf_Nodes_4() { diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/ChildImpurities.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/ChildImpurities.cs index 423251be..c708087f 100644 --- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/ChildImpurities.cs +++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/ChildImpurities.cs @@ -8,17 +8,17 @@ namespace SharpLearning.DecisionTrees.ImpurityCalculators; public struct ChildImpurities : IEquatable { /// - /// + /// /// public readonly double Left; /// - /// + /// /// public readonly double Right; /// - /// + /// /// /// /// @@ -29,32 +29,28 @@ public ChildImpurities(double left, double right) } /// - /// + /// /// /// /// public bool Equals(ChildImpurities other) { if (!Equal(Left, other.Left)) { 
return false; } - if (!Equal(Right, other.Right)) { return false; } - - return true; + return Equal(Right, other.Right); } /// - /// + /// /// /// /// public override bool Equals(object obj) { - if (obj is ChildImpurities) - return Equals((ChildImpurities)obj); - return false; + return obj is ChildImpurities impurities && Equals(impurities); } /// - /// + /// /// /// /// @@ -65,7 +61,7 @@ public override bool Equals(object obj) } /// - /// + /// /// /// /// @@ -76,7 +72,7 @@ public override bool Equals(object obj) } /// - /// + /// /// /// public override int GetHashCode() @@ -84,16 +80,11 @@ public override int GetHashCode() return Left.GetHashCode() ^ Right.GetHashCode(); } - const double m_tolerence = 0.00001; + const double Tolerence = 0.00001; static bool Equal(double a, double b) { - var diff = Math.Abs(a * m_tolerence); - if (Math.Abs(a - b) <= diff) - { - return true; - } - - return false; + var diff = Math.Abs(a * Tolerence); + return Math.Abs(a - b) <= diff; } } diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs index 68db825b..211ba0ae 100644 --- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs +++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/ClassificationImpurityCalculator.cs @@ -9,68 +9,68 @@ namespace SharpLearning.DecisionTrees.ImpurityCalculators; public abstract class ClassificationImpurityCalculator { /// - /// + /// /// protected Interval1D m_interval; /// - /// + /// /// protected int m_currentPosition; /// - /// + /// /// protected double m_weightedTotal = 0.0; /// - /// + /// /// protected double m_weightedLeft = 0.0; /// - /// + /// /// protected double m_weightedRight = 0.0; - internal TargetCounts m_weightedTargetCount = new(); - internal TargetCounts m_weightedTargetCountLeft = new(); - internal TargetCounts m_weightedTargetCountRight = new(); + 
internal TargetCounts WeightedTargetCount = new(); + internal TargetCounts WeightedTargetCountLeft = new(); + internal TargetCounts WeightedTargetCountRight = new(); /// - /// + /// /// protected double[] m_targets; /// - /// + /// /// protected double[] m_weights; /// - /// + /// /// protected double[] m_targetNames; /// - /// + /// /// protected int m_maxTargetNameIndex; /// - /// + /// /// protected int m_targetIndexOffSet; /// - /// + /// /// - public double WeightedLeft { get { return m_weightedLeft; } } + public double WeightedLeft => m_weightedLeft; /// - /// + /// /// - public double WeightedRight { get { return m_weightedRight; } } + public double WeightedRight => m_weightedRight; /// /// Initialize the calculator with targets, weights and work interval @@ -96,9 +96,9 @@ public void Init(double[] targetNames, double[] targets, double[] weights, Inter m_targetIndexOffSet *= -1; } - m_weightedTargetCount.Reset(m_maxTargetNameIndex, m_targetIndexOffSet); - m_weightedTargetCountLeft.Reset(m_maxTargetNameIndex, m_targetIndexOffSet); - m_weightedTargetCountRight.Reset(m_maxTargetNameIndex, m_targetIndexOffSet); + WeightedTargetCount.Reset(m_maxTargetNameIndex, m_targetIndexOffSet); + WeightedTargetCountLeft.Reset(m_maxTargetNameIndex, m_targetIndexOffSet); + WeightedTargetCountRight.Reset(m_maxTargetNameIndex, m_targetIndexOffSet); var w = 1.0; var weightsPresent = m_weights.Length != 0; @@ -110,10 +110,12 @@ public void Init(double[] targetNames, double[] targets, double[] weights, Inter for (var i = m_interval.FromInclusive; i < m_interval.ToExclusive; i++) { if (weightsPresent) + { w = weights[i]; + } var targetIndex = (int)targets[i]; - m_weightedTargetCount[targetIndex] += w; + WeightedTargetCount[targetIndex] += w; m_weightedTotal += w; } @@ -132,8 +134,8 @@ public void Reset() m_weightedLeft = 0.0; m_weightedRight = m_weightedTotal; - m_weightedTargetCountLeft.Clear(); - m_weightedTargetCountRight.SetCounts(m_weightedTargetCount); + 
WeightedTargetCountLeft.Clear(); + WeightedTargetCountRight.SetCounts(WeightedTargetCount); } /// @@ -145,7 +147,6 @@ public void UpdateInterval(Interval1D newInterval) Init(m_targetNames, m_targets, m_weights, newInterval); } - /// /// Updates impurity calculator with new split index /// @@ -165,11 +166,13 @@ public void UpdateIndex(int newPosition) for (var i = m_currentPosition; i < newPosition; i++) { if (weightsPresent) + { w = m_weights[i]; + } var targetIndex = (int)m_targets[i]; - m_weightedTargetCountLeft[targetIndex] += w; - m_weightedTargetCountRight[targetIndex] -= w; + WeightedTargetCountLeft[targetIndex] += w; + WeightedTargetCountRight[targetIndex] -= w; w_diff += w; } diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/GiniClassificationImpurityCalculator.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/GiniClassificationImpurityCalculator.cs index 61c4d48f..4d02c13f 100644 --- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/GiniClassificationImpurityCalculator.cs +++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/GiniClassificationImpurityCalculator.cs @@ -6,7 +6,7 @@ public sealed class GiniClassificationImpurityCalculator : ClassificationImpurityCalculator, IImpurityCalculator { /// - /// + /// /// public GiniClassificationImpurityCalculator() { @@ -15,10 +15,7 @@ public GiniClassificationImpurityCalculator() /// /// Gets the unique target names /// - public double[] TargetNames - { - get { return m_targetNames; } - } + public double[] TargetNames => m_targetNames; /// /// Calculates child impurities with current split index @@ -32,8 +29,8 @@ public override ChildImpurities ChildImpurities() foreach (var targetValue in m_targetNames) { var targetIndex = (int)targetValue; - var leftCount = m_weightedTargetCountLeft[targetIndex]; - var rightCount = m_weightedTargetCountRight[targetIndex]; + var leftCount = WeightedTargetCountLeft[targetIndex]; + var rightCount = WeightedTargetCountRight[targetIndex]; giniLeft += 
leftCount * leftCount; giniRight += rightCount * rightCount; @@ -55,7 +52,7 @@ public override double NodeImpurity() foreach (var targetValue in m_targetNames) { - var value = m_weightedTargetCount[(int)targetValue]; + var value = WeightedTargetCount[(int)targetValue]; gini += value * value; } @@ -89,13 +86,12 @@ public override double LeafValue() foreach (var targetValue in m_targetNames) { - var value = m_weightedTargetCount[(int)targetValue]; + var value = WeightedTargetCount[(int)targetValue]; if (value > maxWeight) { maxWeight = value; bestTarget = targetValue; } - } return bestTarget; @@ -113,7 +109,7 @@ public override double[] LeafProbabilities() for (var i = 0; i < m_targetNames.Length; i++) { var targetValue = (int)m_targetNames[i]; - var targetProbability = (m_weightedTargetCount[targetValue] + 1) * probabilityFactor; + var targetProbability = (WeightedTargetCount[targetValue] + 1) * probabilityFactor; probabilities[i] = targetProbability; } diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/IImpurityCalculator.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/IImpurityCalculator.cs index 65d80fc3..b6f84443 100644 --- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/IImpurityCalculator.cs +++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/IImpurityCalculator.cs @@ -74,9 +74,9 @@ public interface IImpurityCalculator double[] TargetNames { get; } /// - /// Calculates the probabilities based in the current work interval. + /// Calculates the probabilities based in the current work interval. /// Note that LeafProbabilities are only valid for classification impurity calculators. - /// Regression impurity calculators will return and empty result. The orders of the probabilities + /// Regression impurity calculators will return and empty result. The orders of the probabilities /// is the same as TargetNames. 
/// /// diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/RegressionImpurityCalculator.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/RegressionImpurityCalculator.cs index 0eaff84d..3a4b4f8f 100644 --- a/src/SharpLearning.DecisionTrees/ImpurityCalculators/RegressionImpurityCalculator.cs +++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/RegressionImpurityCalculator.cs @@ -35,24 +35,24 @@ public sealed class RegressionImpurityCalculator : IImpurityCalculator double[] m_weights; /// - /// + /// /// - public double WeightedLeft { get { return m_weightedLeft; } } + public double WeightedLeft => m_weightedLeft; /// - /// + /// /// - public double WeightedRight { get { return m_weightedRight; } } + public double WeightedRight => m_weightedRight; /// - /// + /// /// public RegressionImpurityCalculator() { } /// - /// Initialize the calculator with targets, weights and work interval + /// Initialize the calculator with targets, weights and work interval /// /// /// @@ -89,7 +89,9 @@ public void Init(double[] uniqueTargets, double[] targets, double[] weights, Int for (var i = m_interval.FromInclusive; i < m_interval.ToExclusive; i++) { if (weightsPresent) + { w = weights[i]; + } var targetValue = targets[i]; var wTarget = w * targetValue; @@ -156,7 +158,9 @@ public void UpdateIndex(int newPosition) for (var i = m_currentPosition; i < newPosition; i++) { if (weightsPresent) + { w = m_weights[i]; + } var targetValue = m_targets[i]; var wTarget = w * targetValue; @@ -236,10 +240,7 @@ public double LeafValue() /// /// Unique target names are not available for regression /// - public double[] TargetNames - { - get { return []; } - } + public double[] TargetNames => []; /// /// Probabilities are not available for regression diff --git a/src/SharpLearning.DecisionTrees/ImpurityCalculators/TargetCounts.cs b/src/SharpLearning.DecisionTrees/ImpurityCalculators/TargetCounts.cs index 8e7b6ac6..eaf61541 100644 --- 
a/src/SharpLearning.DecisionTrees/ImpurityCalculators/TargetCounts.cs +++ b/src/SharpLearning.DecisionTrees/ImpurityCalculators/TargetCounts.cs @@ -3,7 +3,7 @@ namespace SharpLearning.DecisionTrees.ImpurityCalculators; /// -/// Maintains weighted target counts. +/// Maintains weighted target counts. /// Offset is used for cases with negative target names like -1. /// This is a lot faster than mapping using a dictionary since this solution simply indexes into an array /// @@ -23,7 +23,7 @@ public TargetCounts(int size, int offset) m_counts = new double[Length]; } - public double[] Counts { get => m_counts; } + public double[] Counts => m_counts; public int OffSet { get; private set; } public int Length { get; private set; } @@ -42,8 +42,8 @@ public void Clear() } /// - /// Resets the size and off sets and clears - /// the counts + /// Resets the size and off sets and clears + /// the counts /// /// /// diff --git a/src/SharpLearning.DecisionTrees/Learners/ClassificationDecisionTreeLearner.cs b/src/SharpLearning.DecisionTrees/Learners/ClassificationDecisionTreeLearner.cs index a637e8ab..921ed22a 100644 --- a/src/SharpLearning.DecisionTrees/Learners/ClassificationDecisionTreeLearner.cs +++ b/src/SharpLearning.DecisionTrees/Learners/ClassificationDecisionTreeLearner.cs @@ -28,7 +28,7 @@ public sealed class ClassificationDecisionTreeLearner /// The minimum size /// The number of features to be selected between at each split /// The minimum improvement in information gain before a split is made - /// Seed for feature selection if number of features pr split is not equal + /// Seed for feature selection if number of features pr split is not equal /// to the total amount of features in observations. 
The features will be selected at random for each split public ClassificationDecisionTreeLearner(int maximumTreeDepth = 2000, int minimumSplitSize = 1, @@ -41,7 +41,7 @@ public ClassificationDecisionTreeLearner(int maximumTreeDepth = 2000, } /// - /// + /// /// /// /// @@ -52,7 +52,7 @@ public ClassificationDecisionTreeLearner(int maximumTreeDepth = 2000, } /// - /// Learns a classification tree from the provided observations and targets. + /// Learns a classification tree from the provided observations and targets. /// Weights can be provided in order to weight each sample individually /// /// diff --git a/src/SharpLearning.DecisionTrees/Learners/DecisionTreeLearner.cs b/src/SharpLearning.DecisionTrees/Learners/DecisionTreeLearner.cs index 0514f95d..636ef92e 100644 --- a/src/SharpLearning.DecisionTrees/Learners/DecisionTreeLearner.cs +++ b/src/SharpLearning.DecisionTrees/Learners/DecisionTreeLearner.cs @@ -17,7 +17,7 @@ public unsafe class DecisionTreeLearner readonly ITreeBuilder m_treeBuilder; /// - /// + /// /// /// public DecisionTreeLearner(ITreeBuilder treeBuilder) @@ -111,7 +111,7 @@ public BinaryTree Learn(F64MatrixView observations, double[] targets, int[] indi { if (weights.Length != targets.Length || weights.Length != observations.RowCount) { - throw new ArgumentException($"Weights length differ from observation row count and target length. " + + throw new ArgumentException("Weights length differ from observation row count and target length. 
" + $"Weights: {weights.Length}, observation: {observations.RowCount}, targets: {targets.Length}"); } } diff --git a/src/SharpLearning.DecisionTrees/Learners/RegressionDecisionTreeLearner.cs b/src/SharpLearning.DecisionTrees/Learners/RegressionDecisionTreeLearner.cs index 2c47a621..fcb42454 100644 --- a/src/SharpLearning.DecisionTrees/Learners/RegressionDecisionTreeLearner.cs +++ b/src/SharpLearning.DecisionTrees/Learners/RegressionDecisionTreeLearner.cs @@ -16,13 +16,13 @@ public sealed class RegressionDecisionTreeLearner : DecisionTreeLearner, IIndexedLearner, ILearner { /// - /// + /// /// /// The maximal tree depth before a leaf is generated /// The minimum size /// The number of features to be selected between at each split /// The minimum improvement in information gain before a split is made - /// Seed for feature selection if number of features pr split is not equal + /// Seed for feature selection if number of features pr split is not equal /// to the total amount of features in observations. 
The features will be selected at random for each split public RegressionDecisionTreeLearner(int maximumTreeDepth = 2000, int minimumSplitSize = 1, diff --git a/src/SharpLearning.DecisionTrees/Models/ClassificationDecisionTreeModel.cs b/src/SharpLearning.DecisionTrees/Models/ClassificationDecisionTreeModel.cs index bbd66d9c..56a57cc5 100644 --- a/src/SharpLearning.DecisionTrees/Models/ClassificationDecisionTreeModel.cs +++ b/src/SharpLearning.DecisionTrees/Models/ClassificationDecisionTreeModel.cs @@ -17,14 +17,14 @@ namespace SharpLearning.DecisionTrees.Models; public sealed class ClassificationDecisionTreeModel : IPredictorModel, IPredictorModel { /// - /// + /// /// public readonly BinaryTree Tree; readonly double[] m_variableImportance; /// - /// + /// /// /// public ClassificationDecisionTreeModel(BinaryTree tree) @@ -44,7 +44,7 @@ public double Predict(double[] observation) } /// - /// Predicts a set of observations + /// Predicts a set of observations /// /// /// @@ -123,7 +123,6 @@ public ProbabilityPrediction[] PredictProbability(F64Matrix observations, int[] return predictions; } - /// /// Returns the rescaled (0-100) and sorted variable importance scores with corresponding name /// diff --git a/src/SharpLearning.DecisionTrees/Models/RegressionDecisionTreeModel.cs b/src/SharpLearning.DecisionTrees/Models/RegressionDecisionTreeModel.cs index 48624ee5..24b475bf 100644 --- a/src/SharpLearning.DecisionTrees/Models/RegressionDecisionTreeModel.cs +++ b/src/SharpLearning.DecisionTrees/Models/RegressionDecisionTreeModel.cs @@ -16,13 +16,13 @@ namespace SharpLearning.DecisionTrees.Models; public sealed class RegressionDecisionTreeModel : IPredictorModel { /// - /// + /// /// public readonly BinaryTree Tree; readonly double[] m_variableImportance; /// - /// + /// /// /// public RegressionDecisionTreeModel(BinaryTree tree) @@ -42,7 +42,7 @@ public double Predict(double[] observation) } /// - /// Predicts a set of observations + /// Predicts a set of observations /// 
/// /// diff --git a/src/SharpLearning.DecisionTrees/Nodes/BinaryTree.cs b/src/SharpLearning.DecisionTrees/Nodes/BinaryTree.cs index f974a70c..2480d51c 100644 --- a/src/SharpLearning.DecisionTrees/Nodes/BinaryTree.cs +++ b/src/SharpLearning.DecisionTrees/Nodes/BinaryTree.cs @@ -5,7 +5,7 @@ namespace SharpLearning.DecisionTrees.Nodes; /// -/// Binary tree +/// Binary tree /// [Serializable] public sealed class BinaryTree @@ -30,9 +30,8 @@ public sealed class BinaryTree /// public readonly double[] VariableImportance; - /// - /// + /// /// /// /// @@ -80,14 +79,9 @@ double Predict(Node node, double[] observation) return node.Value; } - if (observation[node.FeatureIndex] <= node.Value) - { - return Predict(Nodes[node.LeftIndex], observation); - } - else - { - return Predict(Nodes[node.RightIndex], observation); - } + return observation[node.FeatureIndex] <= node.Value + ? Predict(Nodes[node.LeftIndex], observation) + : Predict(Nodes[node.RightIndex], observation); throw new InvalidOperationException("The tree is degenerated."); } @@ -105,14 +99,9 @@ Node PredictNode(Node node, double[] observation) return node; } - if (observation[node.FeatureIndex] <= node.Value) - { - return PredictNode(Nodes[node.LeftIndex], observation); - } - else - { - return PredictNode(Nodes[node.RightIndex], observation); - } + return observation[node.FeatureIndex] <= node.Value + ? PredictNode(Nodes[node.LeftIndex], observation) + : PredictNode(Nodes[node.RightIndex], observation); throw new InvalidOperationException("The tree is degenerated."); } @@ -138,14 +127,9 @@ ProbabilityPrediction PredictProbability(Node node, double[] observation) return new ProbabilityPrediction(node.Value, targetProbabilities); } - if (observation[node.FeatureIndex] <= node.Value) - { - return PredictProbability(Nodes[node.LeftIndex], observation); - } - else - { - return PredictProbability(Nodes[node.RightIndex], observation); - } + return observation[node.FeatureIndex] <= node.Value + ? 
PredictProbability(Nodes[node.LeftIndex], observation) + : PredictProbability(Nodes[node.RightIndex], observation); throw new InvalidOperationException("The tree is degenerated."); } diff --git a/src/SharpLearning.DecisionTrees/Nodes/DecisionNodeCreationItem.cs b/src/SharpLearning.DecisionTrees/Nodes/DecisionNodeCreationItem.cs index 1247638a..33293425 100644 --- a/src/SharpLearning.DecisionTrees/Nodes/DecisionNodeCreationItem.cs +++ b/src/SharpLearning.DecisionTrees/Nodes/DecisionNodeCreationItem.cs @@ -8,32 +8,32 @@ namespace SharpLearning.DecisionTrees.Nodes; public struct DecisionNodeCreationItem { /// - /// + /// /// public readonly int ParentIndex; /// - /// + /// /// public readonly NodePositionType NodeType; /// - /// + /// /// public readonly Interval1D Interval; /// - /// + /// /// public readonly double Impurity; /// - /// + /// /// public readonly int NodeDepth; /// - /// + /// /// /// /// diff --git a/src/SharpLearning.DecisionTrees/Nodes/Node.cs b/src/SharpLearning.DecisionTrees/Nodes/Node.cs index 0400aa6f..bd86890e 100644 --- a/src/SharpLearning.DecisionTrees/Nodes/Node.cs +++ b/src/SharpLearning.DecisionTrees/Nodes/Node.cs @@ -39,7 +39,7 @@ public struct Node public readonly int LeafProbabilityIndex; /// - /// + /// /// /// /// diff --git a/src/SharpLearning.DecisionTrees/Nodes/NodeExtensions.cs b/src/SharpLearning.DecisionTrees/Nodes/NodeExtensions.cs index d476037b..9b3ad72b 100644 --- a/src/SharpLearning.DecisionTrees/Nodes/NodeExtensions.cs +++ b/src/SharpLearning.DecisionTrees/Nodes/NodeExtensions.cs @@ -32,6 +32,5 @@ public static void UpdateParent(this List nodes, Node parent, Node child, default: throw new InvalidOperationException("Unsupported position type"); } - } } diff --git a/src/SharpLearning.DecisionTrees/Nodes/NodePositionType.cs b/src/SharpLearning.DecisionTrees/Nodes/NodePositionType.cs index 30d7ae10..b2866433 100644 --- a/src/SharpLearning.DecisionTrees/Nodes/NodePositionType.cs +++ 
b/src/SharpLearning.DecisionTrees/Nodes/NodePositionType.cs @@ -1,22 +1,22 @@ namespace SharpLearning.DecisionTrees.Nodes; /// -/// +/// /// public enum NodePositionType { /// - /// + /// /// Root, /// - /// + /// /// Left, /// - /// + /// /// - Right + Right, } diff --git a/src/SharpLearning.DecisionTrees/SplitSearchers/ISplitSearcher.cs b/src/SharpLearning.DecisionTrees/SplitSearchers/ISplitSearcher.cs index 510f4203..0037f440 100644 --- a/src/SharpLearning.DecisionTrees/SplitSearchers/ISplitSearcher.cs +++ b/src/SharpLearning.DecisionTrees/SplitSearchers/ISplitSearcher.cs @@ -9,7 +9,7 @@ namespace SharpLearning.DecisionTrees.SplitSearchers; public interface ISplitSearcher { /// - /// + /// /// /// /// diff --git a/src/SharpLearning.DecisionTrees/SplitSearchers/LinearSplitSearcher.cs b/src/SharpLearning.DecisionTrees/SplitSearchers/LinearSplitSearcher.cs index fb7a68df..98fb0558 100644 --- a/src/SharpLearning.DecisionTrees/SplitSearchers/LinearSplitSearcher.cs +++ b/src/SharpLearning.DecisionTrees/SplitSearchers/LinearSplitSearcher.cs @@ -5,8 +5,8 @@ namespace SharpLearning.DecisionTrees.SplitSearchers; /// -/// Searches for the best split using a brute force approach. The searcher only considers splits -/// when both the threshold value and the target value has changed. +/// Searches for the best split using a brute force approach. The searcher only considers splits +/// when both the threshold value and the target value has changed. /// The implementation assumes that the features and targets have been sorted /// together using the features as sort criteria /// @@ -16,8 +16,8 @@ public sealed class LinearSplitSearcher : ISplitSearcher readonly double m_minimumLeafWeight; /// - /// Searches for the best split using a brute force approach. The searcher only considers splits - /// when both the threshold value and the target value has changed. + /// Searches for the best split using a brute force approach. 
The searcher only considers splits + /// when both the threshold value and the target value has changed. /// The implementation assumes that the features and targets have been sorted /// together using the features as sort criteria /// @@ -28,8 +28,8 @@ public LinearSplitSearcher(int minimumSplitSize) } /// - /// Searches for the best split using a brute force approach. The searcher only considers splits - /// when both the threshold value and the target value has changed. + /// Searches for the best split using a brute force approach. The searcher only considers splits + /// when both the threshold value and the target value has changed. /// The implementation assumes that the features and targets have been sorted /// together using the features as sort criteria /// @@ -43,7 +43,7 @@ public LinearSplitSearcher(int minimumSplitSize, double minimumLeafWeight) } /// - /// Searches for the best split using a brute force approach. The searcher only considers splits + /// Searches for the best split using a brute force approach. The searcher only considers splits /// when both the threshold value and the target value has changed. 
/// The implementation assumes that the features and targets have been sorted /// together using the features as sort criteria @@ -57,7 +57,6 @@ public LinearSplitSearcher(int minimumSplitSize, double minimumLeafWeight) public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[] feature, double[] targets, Interval1D parentInterval, double parentImpurity) { - var bestSplitIndex = -1; var bestThreshold = 0.0; var bestImpurityImprovement = 0.0; diff --git a/src/SharpLearning.DecisionTrees/SplitSearchers/OnlyUniqueThresholdsSplitSearcher.cs b/src/SharpLearning.DecisionTrees/SplitSearchers/OnlyUniqueThresholdsSplitSearcher.cs index 8ec6184b..4a8063b7 100644 --- a/src/SharpLearning.DecisionTrees/SplitSearchers/OnlyUniqueThresholdsSplitSearcher.cs +++ b/src/SharpLearning.DecisionTrees/SplitSearchers/OnlyUniqueThresholdsSplitSearcher.cs @@ -5,7 +5,7 @@ namespace SharpLearning.DecisionTrees.SplitSearchers; /// -/// Searches for the best split using a brute force approach on all unique threshold values. +/// Searches for the best split using a brute force approach on all unique threshold values. /// The implementation assumes that the features and targets have been sorted /// together using the features as sort criteria /// @@ -15,7 +15,7 @@ public sealed class OnlyUniqueThresholdsSplitSearcher : ISplitSearcher readonly double m_minimumLeafWeight; /// - /// Searches for the best split using a brute force approach on all unique threshold values. + /// Searches for the best split using a brute force approach on all unique threshold values. /// The implementation assumes that the features and targets have been sorted /// together using the features as sort criteria /// @@ -26,7 +26,7 @@ public OnlyUniqueThresholdsSplitSearcher(int minimumSplitSize) } /// - /// Searches for the best split using a brute force approach on all unique threshold values. + /// Searches for the best split using a brute force approach on all unique threshold values. 
/// The implementation assumes that the features and targets have been sorted /// together using the features as sort criteria /// @@ -43,9 +43,8 @@ public OnlyUniqueThresholdsSplitSearcher(int minimumSplitSize, double minimumLea m_minimumLeafWeight = minimumLeafWeight; } - /// - /// Searches for the best split using a brute force approach on all unique threshold values. + /// Searches for the best split using a brute force approach on all unique threshold values. /// The implementation assumes that the features and targets have been sorted /// together using the features as sort criteria /// @@ -58,7 +57,6 @@ public OnlyUniqueThresholdsSplitSearcher(int minimumSplitSize, double minimumLea public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[] feature, double[] targets, Interval1D parentInterval, double parentImpurity) { - var bestSplitIndex = -1; var bestThreshold = 0.0; var bestImpurityImprovement = 0.0; diff --git a/src/SharpLearning.DecisionTrees/SplitSearchers/RandomSplitSearcher.cs b/src/SharpLearning.DecisionTrees/SplitSearchers/RandomSplitSearcher.cs index babb0f6e..64396c28 100644 --- a/src/SharpLearning.DecisionTrees/SplitSearchers/RandomSplitSearcher.cs +++ b/src/SharpLearning.DecisionTrees/SplitSearchers/RandomSplitSearcher.cs @@ -33,7 +33,7 @@ public RandomSplitSearcher(int minimumSplitSize, int seed) } /// - /// + /// /// /// /// @@ -108,7 +108,7 @@ public SplitResult FindBestSplit(IImpurityCalculator impurityCalculator, double[ } /// - /// + /// /// /// /// diff --git a/src/SharpLearning.DecisionTrees/SplitSearchers/SplitResult.cs b/src/SharpLearning.DecisionTrees/SplitSearchers/SplitResult.cs index 5b6c9177..69c541ad 100644 --- a/src/SharpLearning.DecisionTrees/SplitSearchers/SplitResult.cs +++ b/src/SharpLearning.DecisionTrees/SplitSearchers/SplitResult.cs @@ -3,7 +3,7 @@ namespace SharpLearning.DecisionTrees.SplitSearchers; /// -/// +/// /// public struct SplitResult : IEquatable { @@ -23,17 +23,17 @@ public struct 
SplitResult : IEquatable public readonly double ImpurityImprovement; /// - /// Impurity of the left side of the split + /// Impurity of the left side of the split /// public readonly double ImpurityLeft; /// - /// Impurity of the right side of the split + /// Impurity of the right side of the split /// public readonly double ImpurityRight; /// - /// + /// /// /// Split index within the feature used for split /// Threshold used for splitting @@ -60,7 +60,7 @@ public static SplitResult Initial() } /// - /// + /// /// /// /// @@ -70,25 +70,21 @@ public bool Equals(SplitResult other) if (!Equal(Threshold, other.Threshold)) { return false; } if (!Equal(ImpurityImprovement, other.ImpurityImprovement)) { return false; } if (!Equal(ImpurityLeft, other.ImpurityLeft)) { return false; } - if (!Equal(ImpurityRight, other.ImpurityRight)) { return false; } - - return true; + return Equal(ImpurityRight, other.ImpurityRight); } /// - /// + /// /// /// /// public override bool Equals(object obj) { - if (obj is SplitResult) - return Equals((SplitResult)obj); - return false; + return obj is SplitResult splitResult && Equals(splitResult); } /// - /// + /// /// /// /// @@ -99,7 +95,7 @@ public override bool Equals(object obj) } /// - /// + /// /// /// /// @@ -110,7 +106,7 @@ public override bool Equals(object obj) } /// - /// + /// /// /// public override int GetHashCode() @@ -122,16 +118,11 @@ public override int GetHashCode() ImpurityRight.GetHashCode(); } - const double m_tolerence = 0.00001; + const double Tolerence = 0.00001; static bool Equal(double a, double b) { - var diff = Math.Abs(a * m_tolerence); - if (Math.Abs(a - b) <= diff) - { - return true; - } - - return false; + var diff = Math.Abs(a * Tolerence); + return Math.Abs(a - b) <= diff; } } diff --git a/src/SharpLearning.DecisionTrees/TreeBuilders/BestFirstTreeBuilder.cs b/src/SharpLearning.DecisionTrees/TreeBuilders/BestFirstTreeBuilder.cs index 37d85e1f..f853d367 100644 --- 
a/src/SharpLearning.DecisionTrees/TreeBuilders/BestFirstTreeBuilder.cs +++ b/src/SharpLearning.DecisionTrees/TreeBuilders/BestFirstTreeBuilder.cs @@ -10,8 +10,8 @@ namespace SharpLearning.DecisionTrees.TreeBuilders; /// -/// Builds a decision tree in a best first manner. -/// This method enables maximum leaf nodes to be set. +/// Builds a decision tree in a best first manner. +/// This method enables maximum leaf nodes to be set. /// public sealed class BestFirstTreeBuilder : ITreeBuilder { @@ -45,14 +45,14 @@ public sealed class BestFirstTreeBuilder : ITreeBuilder double[] m_variableImportance = []; /// - /// + /// /// /// The maximal tree depth before a leaf is generated /// The maximal allowed leaf nodes in the tree - /// The number of features to be selected between at each split. + /// The number of features to be selected between at each split. /// 0 means use all available features /// The minimum improvement in information gain before a split is made - /// Seed for feature selection if number of features pr split is not equal + /// Seed for feature selection if number of features pr split is not equal /// to the total amount of features in observations. 
The features will be selected at random for each split /// The type of searcher used for finding the best features splits when learning the tree /// Impurity calculator used to decide which split is optimal @@ -80,7 +80,7 @@ public BestFirstTreeBuilder(int maximumTreeDepth, } /// - /// + /// /// /// /// diff --git a/src/SharpLearning.DecisionTrees/TreeBuilders/DepthFirstTreeBuilder.cs b/src/SharpLearning.DecisionTrees/TreeBuilders/DepthFirstTreeBuilder.cs index d618296e..79b14762 100644 --- a/src/SharpLearning.DecisionTrees/TreeBuilders/DepthFirstTreeBuilder.cs +++ b/src/SharpLearning.DecisionTrees/TreeBuilders/DepthFirstTreeBuilder.cs @@ -43,13 +43,13 @@ public sealed class DepthFirstTreeBuilder : ITreeBuilder double[] m_variableImportance = []; /// - /// + /// /// /// The maximal tree depth before a leaf is generated - /// The number of features to be selected between at each split. + /// The number of features to be selected between at each split. /// 0 means use all available features /// The minimum improvement in information gain before a split is made - /// Seed for feature selection if number of features pr split is not equal + /// Seed for feature selection if number of features pr split is not equal /// to the total amount of features in observations. 
The features will be selected at random for each split /// The type of searcher used for finding the best features splits when learning the tree /// Impurity calculator used to decide which split is optimal @@ -74,7 +74,7 @@ public DepthFirstTreeBuilder(int maximumTreeDepth, } /// - /// + /// /// /// /// diff --git a/src/SharpLearning.DecisionTrees/TreeBuilders/ITreeBuilder.cs b/src/SharpLearning.DecisionTrees/TreeBuilders/ITreeBuilder.cs index 4b087b5c..31726723 100644 --- a/src/SharpLearning.DecisionTrees/TreeBuilders/ITreeBuilder.cs +++ b/src/SharpLearning.DecisionTrees/TreeBuilders/ITreeBuilder.cs @@ -9,7 +9,7 @@ namespace SharpLearning.DecisionTrees.TreeBuilders; public interface ITreeBuilder { /// - /// + /// /// /// /// diff --git a/src/SharpLearning.Ensemble.Test/DataSetUtilities.cs b/src/SharpLearning.Ensemble.Test/DataSetUtilities.cs index fabaca7d..d3acc661 100644 --- a/src/SharpLearning.Ensemble.Test/DataSetUtilities.cs +++ b/src/SharpLearning.Ensemble.Test/DataSetUtilities.cs @@ -478,5 +478,4 @@ public static (F64Matrix observations, double[] targets) LoadDecisionTreeDataSet 1.52065;14.36;0;2.02;73.42;0;8.44;1.64;0;7 1.51651;14.38;0;1.94;73.61;0;8.48;1.57;0;7 1.51711;14.23;0;2.08;73.36;0;8.62;1.67;0;7"; - } diff --git a/src/SharpLearning.Ensemble.Test/EnsembleSelectors/ForwardSearchClassificationEnsembleSelectionTest.cs b/src/SharpLearning.Ensemble.Test/EnsembleSelectors/ForwardSearchClassificationEnsembleSelectionTest.cs index 2cd72e18..9f2703f8 100644 --- a/src/SharpLearning.Ensemble.Test/EnsembleSelectors/ForwardSearchClassificationEnsembleSelectionTest.cs +++ b/src/SharpLearning.Ensemble.Test/EnsembleSelectors/ForwardSearchClassificationEnsembleSelectionTest.cs @@ -63,7 +63,6 @@ public void ForwardSearchClassificationEnsembleSelection_Constructor_Number_Of_A new LogLossClassificationProbabilityMetric(), new MeanProbabilityClassificationEnsembleStrategy(), 5, 1, true); - var observations = new ProbabilityPrediction[3][]; observations.Select(t => new 
ProbabilityPrediction[10]).ToArray(); var targets = new double[10]; diff --git a/src/SharpLearning.Ensemble.Test/Learners/ClassificationBackwardEliminationModelSelectingEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/ClassificationBackwardEliminationModelSelectingEnsembleLearnerTest.cs index c2440030..819e1e42 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/ClassificationBackwardEliminationModelSelectingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/ClassificationBackwardEliminationModelSelectingEnsembleLearnerTest.cs @@ -29,7 +29,7 @@ public void ClassificationBackwardEliminationModelSelectingEnsembleLearner_Learn new ClassificationDecisionTreeLearner(14), new ClassificationDecisionTreeLearner(17), new ClassificationDecisionTreeLearner(19), - new ClassificationDecisionTreeLearner(33) + new ClassificationDecisionTreeLearner(33), }; var sut = new ClassificationBackwardEliminationModelSelectingEnsembleLearner(learners, 5); @@ -61,7 +61,7 @@ public void ClassificationBackwardEliminationModelSelectingEnsembleLearner_Creat new ClassificationDecisionTreeLearner(14), new ClassificationDecisionTreeLearner(17), new ClassificationDecisionTreeLearner(19), - new ClassificationDecisionTreeLearner(33) + new ClassificationDecisionTreeLearner(33), }; var sut = new ClassificationBackwardEliminationModelSelectingEnsembleLearner(learners, 5); @@ -95,7 +95,7 @@ public void ClassificationBackwardEliminationModelSelectingEnsembleLearner_Learn new ClassificationDecisionTreeLearner(14), new ClassificationDecisionTreeLearner(17), new ClassificationDecisionTreeLearner(19), - new ClassificationDecisionTreeLearner(33) + new ClassificationDecisionTreeLearner(33), }; var metric = new LogLossClassificationProbabilityMetric(); diff --git a/src/SharpLearning.Ensemble.Test/Learners/ClassificationEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/ClassificationEnsembleLearnerTest.cs index 8c4bc7f6..61269077 100644 --- 
a/src/SharpLearning.Ensemble.Test/Learners/ClassificationEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/ClassificationEnsembleLearnerTest.cs @@ -20,7 +20,7 @@ public void ClassificationEnsembleLearner_Learn() new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var sut = new ClassificationEnsembleLearner(learners, @@ -45,7 +45,7 @@ public void ClassificationEnsembleLearner_Learn_Bagging() new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var sut = new ClassificationEnsembleLearner(learners, @@ -70,7 +70,7 @@ public void ClassificationEnsembleLearner_Learn_Indexed() new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var sut = new ClassificationEnsembleLearner(learners, diff --git a/src/SharpLearning.Ensemble.Test/Learners/ClassificationForwardSearchModelSelectingEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/ClassificationForwardSearchModelSelectingEnsembleLearnerTest.cs index e1f22d77..c4672e3e 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/ClassificationForwardSearchModelSelectingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/ClassificationForwardSearchModelSelectingEnsembleLearnerTest.cs @@ -29,7 +29,7 @@ public void ClassificationForwardSearchModelSelectingEnsembleLearner_Learn() new ClassificationDecisionTreeLearner(14), new ClassificationDecisionTreeLearner(17), new ClassificationDecisionTreeLearner(19), - new ClassificationDecisionTreeLearner(33) + new ClassificationDecisionTreeLearner(33), }; var sut = 
new ClassificationForwardSearchModelSelectingEnsembleLearner(learners, 5); @@ -61,7 +61,7 @@ public void ClassificationForwardSearchModelSelectingEnsembleLearner_Learn_Witho new ClassificationDecisionTreeLearner(14), new ClassificationDecisionTreeLearner(17), new ClassificationDecisionTreeLearner(19), - new ClassificationDecisionTreeLearner(33) + new ClassificationDecisionTreeLearner(33), }; var metric = new LogLossClassificationProbabilityMetric(); @@ -96,7 +96,7 @@ public void ClassificationForwardSearchModelSelectingEnsembleLearner_Learn_Start new ClassificationDecisionTreeLearner(14), new ClassificationDecisionTreeLearner(17), new ClassificationDecisionTreeLearner(19), - new ClassificationDecisionTreeLearner(33) + new ClassificationDecisionTreeLearner(33), }; var metric = new LogLossClassificationProbabilityMetric(); @@ -131,7 +131,7 @@ public void ClassificationForwardSearchModelSelectingEnsembleLearner_Learn_Index new ClassificationDecisionTreeLearner(14), new ClassificationDecisionTreeLearner(17), new ClassificationDecisionTreeLearner(19), - new ClassificationDecisionTreeLearner(33) + new ClassificationDecisionTreeLearner(33), }; var metric = new LogLossClassificationProbabilityMetric(); diff --git a/src/SharpLearning.Ensemble.Test/Learners/ClassificationModelSelectingEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/ClassificationModelSelectingEnsembleLearnerTest.cs index 4bbae371..71b068d0 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/ClassificationModelSelectingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/ClassificationModelSelectingEnsembleLearnerTest.cs @@ -31,7 +31,7 @@ public void ClassificationModelSelectingEnsembleLearner_Learn() new ClassificationDecisionTreeLearner(14), new ClassificationDecisionTreeLearner(17), new ClassificationDecisionTreeLearner(19), - new ClassificationDecisionTreeLearner(33) + new ClassificationDecisionTreeLearner(33), }; var metric = new 
LogLossClassificationProbabilityMetric(); @@ -69,7 +69,7 @@ public void ClassificationModelSelectingEnsembleLearner_Learn_Without_Replacemen new ClassificationDecisionTreeLearner(14), new ClassificationDecisionTreeLearner(17), new ClassificationDecisionTreeLearner(19), - new ClassificationDecisionTreeLearner(33) + new ClassificationDecisionTreeLearner(33), }; var metric = new LogLossClassificationProbabilityMetric(); @@ -107,7 +107,7 @@ public void ClassificationModelSelectingEnsembleLearner_Learn_Start_With_3_Model new ClassificationDecisionTreeLearner(14), new ClassificationDecisionTreeLearner(17), new ClassificationDecisionTreeLearner(19), - new ClassificationDecisionTreeLearner(33) + new ClassificationDecisionTreeLearner(33), }; var metric = new LogLossClassificationProbabilityMetric(); @@ -145,7 +145,7 @@ public void ClassificationModelSelectingEnsembleLearner_Learn_Indexed() new ClassificationDecisionTreeLearner(14), new ClassificationDecisionTreeLearner(17), new ClassificationDecisionTreeLearner(19), - new ClassificationDecisionTreeLearner(33) + new ClassificationDecisionTreeLearner(33), }; var metric = new LogLossClassificationProbabilityMetric(); diff --git a/src/SharpLearning.Ensemble.Test/Learners/ClassificationRandomModelSelectingEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/ClassificationRandomModelSelectingEnsembleLearnerTest.cs index edd0dfc4..7e367c96 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/ClassificationRandomModelSelectingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/ClassificationRandomModelSelectingEnsembleLearnerTest.cs @@ -29,7 +29,7 @@ public void ClassificationRandomModelSelectingEnsembleLearner_Learn() new ClassificationDecisionTreeLearner(14), new ClassificationDecisionTreeLearner(17), new ClassificationDecisionTreeLearner(19), - new ClassificationDecisionTreeLearner(33) + new ClassificationDecisionTreeLearner(33), }; var sut = new 
ClassificationRandomModelSelectingEnsembleLearner(learners, 5); @@ -61,7 +61,7 @@ public void ClassificationRandomModelSelectingEnsembleLearner_Learn_Without_Repl new ClassificationDecisionTreeLearner(14), new ClassificationDecisionTreeLearner(17), new ClassificationDecisionTreeLearner(19), - new ClassificationDecisionTreeLearner(33) + new ClassificationDecisionTreeLearner(33), }; var metric = new LogLossClassificationProbabilityMetric(); @@ -97,7 +97,7 @@ public void ClassificationRandomModelSelectingEnsembleLearner_Learn_Indexed() new ClassificationDecisionTreeLearner(14), new ClassificationDecisionTreeLearner(17), new ClassificationDecisionTreeLearner(19), - new ClassificationDecisionTreeLearner(33) + new ClassificationDecisionTreeLearner(33), }; var metric = new LogLossClassificationProbabilityMetric(); diff --git a/src/SharpLearning.Ensemble.Test/Learners/ClassificationStackingEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/ClassificationStackingEnsembleLearnerTest.cs index a8a00b66..0e32dbc3 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/ClassificationStackingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/ClassificationStackingEnsembleLearnerTest.cs @@ -20,7 +20,7 @@ public void ClassificationStackingEnsembleLearner_Learn() new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var sut = new ClassificationStackingEnsembleLearner(learners, @@ -46,7 +46,7 @@ public void ClassificationStackingEnsembleLearner_CreateMetaFeatures_Then_Learn( new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var sut = new ClassificationStackingEnsembleLearner(learners, @@ -74,7 +74,7 @@ public void 
ClassificationStackingEnsembleLearner_Learn_Include_Original_Feature new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var sut = new ClassificationStackingEnsembleLearner(learners, @@ -100,7 +100,7 @@ public void ClassificationStackingEnsembleLearner_Learn_Indexed() new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var sut = new ClassificationStackingEnsembleLearner(learners, diff --git a/src/SharpLearning.Ensemble.Test/Learners/RegressionBackwardEliminationModelSelectingEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/RegressionBackwardEliminationModelSelectingEnsembleLearnerTest.cs index c6d75895..4820bf8b 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/RegressionBackwardEliminationModelSelectingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/RegressionBackwardEliminationModelSelectingEnsembleLearnerTest.cs @@ -26,8 +26,7 @@ public void RegressionBackwardEliminationModelSelectingEnsembleLearner_Learn() new RegressionDecisionTreeLearner(14), new RegressionDecisionTreeLearner(17), new RegressionDecisionTreeLearner(19), - new RegressionDecisionTreeLearner(33) - + new RegressionDecisionTreeLearner(33), }; var sut = new RegressionBackwardEliminationModelSelectingEnsembleLearner(learners, 5); @@ -59,8 +58,7 @@ public void RegressionBackwardEliminationModelSelectingEnsembleLearner_CreateMet new RegressionDecisionTreeLearner(14), new RegressionDecisionTreeLearner(17), new RegressionDecisionTreeLearner(19), - new RegressionDecisionTreeLearner(33) - + new RegressionDecisionTreeLearner(33), }; var sut = new RegressionBackwardEliminationModelSelectingEnsembleLearner(learners, 5); @@ -94,7 +92,7 @@ public 
void RegressionBackwardEliminationModelSelectingEnsembleLearner_Learn_Ind new RegressionDecisionTreeLearner(14), new RegressionDecisionTreeLearner(17), new RegressionDecisionTreeLearner(19), - new RegressionDecisionTreeLearner(33) + new RegressionDecisionTreeLearner(33), }; var sut = new RegressionBackwardEliminationModelSelectingEnsembleLearner(learners, 5); diff --git a/src/SharpLearning.Ensemble.Test/Learners/RegressionEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/RegressionEnsembleLearnerTest.cs index 9ff1c8ab..a42f76c0 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/RegressionEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/RegressionEnsembleLearnerTest.cs @@ -19,7 +19,7 @@ public void RegressionEnsembleLearner_Learn() new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var sut = new RegressionEnsembleLearner(learners, new MeanRegressionEnsembleStrategy()); @@ -43,7 +43,7 @@ public void RegressionEnsembleLearner_Learn_Bagging() new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var sut = new RegressionEnsembleLearner(learners, new MeanRegressionEnsembleStrategy(), 0.7); @@ -67,7 +67,7 @@ public void RegressionEnsembleLearner_Learn_Indexed() new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var sut = new RegressionEnsembleLearner(learners, new MeanRegressionEnsembleStrategy()); diff --git a/src/SharpLearning.Ensemble.Test/Learners/RegressionForwardSearchModelSelectingEnsembleLearnerTest.cs 
b/src/SharpLearning.Ensemble.Test/Learners/RegressionForwardSearchModelSelectingEnsembleLearnerTest.cs index fa4da0b5..64a59395 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/RegressionForwardSearchModelSelectingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/RegressionForwardSearchModelSelectingEnsembleLearnerTest.cs @@ -28,8 +28,7 @@ public void RegressionForwardSearchModelSelectingEnsembleLearner_Learn() new RegressionDecisionTreeLearner(14), new RegressionDecisionTreeLearner(17), new RegressionDecisionTreeLearner(19), - new RegressionDecisionTreeLearner(33) - + new RegressionDecisionTreeLearner(33), }; var sut = new RegressionForwardSearchModelSelectingEnsembleLearner(learners, 5); @@ -61,8 +60,7 @@ public void RegressionForwardSearchModelSelectingEnsembleLearner_Learn_Without_R new RegressionDecisionTreeLearner(14), new RegressionDecisionTreeLearner(17), new RegressionDecisionTreeLearner(19), - new RegressionDecisionTreeLearner(33) - + new RegressionDecisionTreeLearner(33), }; var metric = new MeanSquaredErrorRegressionMetric(); @@ -97,8 +95,7 @@ public void RegressionForwardSearchModelSelectingEnsembleLearner_Learn_Start_Wit new RegressionDecisionTreeLearner(14), new RegressionDecisionTreeLearner(17), new RegressionDecisionTreeLearner(19), - new RegressionDecisionTreeLearner(33) - + new RegressionDecisionTreeLearner(33), }; var metric = new MeanSquaredErrorRegressionMetric(); @@ -133,7 +130,7 @@ public void RegressionForwardSearchModelSelectingEnsembleLearner_Learn_Indexed() new RegressionDecisionTreeLearner(14), new RegressionDecisionTreeLearner(17), new RegressionDecisionTreeLearner(19), - new RegressionDecisionTreeLearner(33) + new RegressionDecisionTreeLearner(33), }; var sut = new RegressionForwardSearchModelSelectingEnsembleLearner(learners, 5); diff --git a/src/SharpLearning.Ensemble.Test/Learners/RegressionRandomModelSelectingEnsembleLearnerTest.cs 
b/src/SharpLearning.Ensemble.Test/Learners/RegressionRandomModelSelectingEnsembleLearnerTest.cs index db082c59..5a546937 100644 --- a/src/SharpLearning.Ensemble.Test/Learners/RegressionRandomModelSelectingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/RegressionRandomModelSelectingEnsembleLearnerTest.cs @@ -28,8 +28,7 @@ public void RegressionRandomModelSelectingEnsembleLearner_Learn() new RegressionDecisionTreeLearner(14), new RegressionDecisionTreeLearner(17), new RegressionDecisionTreeLearner(19), - new RegressionDecisionTreeLearner(33) - + new RegressionDecisionTreeLearner(33), }; var sut = new RegressionRandomModelSelectingEnsembleLearner(learners, 5); @@ -61,8 +60,7 @@ public void RegressionRandomModelSelectingEnsembleLearner_Learn_Without_Replacem new RegressionDecisionTreeLearner(14), new RegressionDecisionTreeLearner(17), new RegressionDecisionTreeLearner(19), - new RegressionDecisionTreeLearner(33) - + new RegressionDecisionTreeLearner(33), }; var metric = new MeanSquaredErrorRegressionMetric(); @@ -98,8 +96,7 @@ public void RegressionRandomModelSelectingEnsembleLearner_Learn_Start_With_3_Mod new RegressionDecisionTreeLearner(14), new RegressionDecisionTreeLearner(17), new RegressionDecisionTreeLearner(19), - new RegressionDecisionTreeLearner(33) - + new RegressionDecisionTreeLearner(33), }; var metric = new MeanSquaredErrorRegressionMetric(); @@ -135,7 +132,7 @@ public void RegressionRandomModelSelectingEnsembleLearner_Learn_Indexed() new RegressionDecisionTreeLearner(14), new RegressionDecisionTreeLearner(17), new RegressionDecisionTreeLearner(19), - new RegressionDecisionTreeLearner(33) + new RegressionDecisionTreeLearner(33), }; var sut = new RegressionRandomModelSelectingEnsembleLearner(learners, 5); diff --git a/src/SharpLearning.Ensemble.Test/Learners/RegressionStackingEnsembleLearnerTest.cs b/src/SharpLearning.Ensemble.Test/Learners/RegressionStackingEnsembleLearnerTest.cs index fbf85001..0a0c0710 100644 --- 
a/src/SharpLearning.Ensemble.Test/Learners/RegressionStackingEnsembleLearnerTest.cs +++ b/src/SharpLearning.Ensemble.Test/Learners/RegressionStackingEnsembleLearnerTest.cs @@ -19,7 +19,7 @@ public void RegressionStackingEnsembleLearner_Learn() new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var sut = new RegressionStackingEnsembleLearner(learners, @@ -45,7 +45,7 @@ public void RegressionStackingEnsembleLearner_CreateMetaFeatures_Then_Learn() new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var sut = new RegressionStackingEnsembleLearner(learners, @@ -73,7 +73,7 @@ public void RegressionStackingEnsembleLearner_Learn_Keep_Original_Features() new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var sut = new RegressionStackingEnsembleLearner(learners, @@ -99,7 +99,7 @@ public void RegressionStackingEnsembleLearner_Learn_Indexed() new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var sut = new RegressionStackingEnsembleLearner(learners, diff --git a/src/SharpLearning.Ensemble.Test/Models/ClassificationEnsembleModelTest.cs b/src/SharpLearning.Ensemble.Test/Models/ClassificationEnsembleModelTest.cs index 0a128bea..74ce89f5 100644 --- a/src/SharpLearning.Ensemble.Test/Models/ClassificationEnsembleModelTest.cs +++ b/src/SharpLearning.Ensemble.Test/Models/ClassificationEnsembleModelTest.cs @@ -23,7 +23,7 @@ public void ClassificationEnsembleModel_Predict_single() new 
ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var learner = new ClassificationEnsembleLearner(learners, @@ -54,7 +54,7 @@ public void ClassificationEnsembleModel_Predict_Multiple() new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var learner = new ClassificationEnsembleLearner(learners, @@ -80,7 +80,7 @@ public void ClassificationEnsembleModel_PredictProbability_single() new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var learner = new ClassificationEnsembleLearner(learners, @@ -111,7 +111,7 @@ public void ClassificationEnsembleModel_PredictProbability_Multiple() new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var learner = new ClassificationEnsembleLearner(learners, @@ -133,14 +133,14 @@ public void ClassificationEnsembleModel_GetVariableImportance() var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, - { "PreviousExperience_month", 1 } }; + { "PreviousExperience_month", 1 }, }; var learners = new IIndexedLearner[] { new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var learner = new ClassificationEnsembleLearner(learners, @@ -150,7 +150,7 @@ public void 
ClassificationEnsembleModel_GetVariableImportance() var actual = sut.GetVariableImportance(featureNameToIndex); var expected = new Dictionary { { "PreviousExperience_month", 100.0 }, - { "AptitudeTestScore", 15.6771501925546 } }; + { "AptitudeTestScore", 15.6771501925546 }, }; Assert.AreEqual(expected.Count, actual.Count); var zip = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a }); @@ -172,7 +172,7 @@ public void ClassificationEnsembleModel_GetRawVariableImportance() new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var learner = new ClassificationEnsembleLearner(learners, diff --git a/src/SharpLearning.Ensemble.Test/Models/ClassificationStackingEnsembleModelTest.cs b/src/SharpLearning.Ensemble.Test/Models/ClassificationStackingEnsembleModelTest.cs index 456046b4..3ab750e0 100644 --- a/src/SharpLearning.Ensemble.Test/Models/ClassificationStackingEnsembleModelTest.cs +++ b/src/SharpLearning.Ensemble.Test/Models/ClassificationStackingEnsembleModelTest.cs @@ -24,7 +24,7 @@ public void ClassificationStackingEnsembleModel_Predict_single() new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var learner = new ClassificationStackingEnsembleLearner(learners, @@ -56,7 +56,7 @@ public void ClassificationStackingEnsembleModel_Predict_Multiple() new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var learner = new ClassificationStackingEnsembleLearner(learners, @@ -83,7 +83,7 @@ public void ClassificationStackingEnsembleModel_PredictProbability_single() new 
ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var learner = new ClassificationStackingEnsembleLearner(learners, @@ -115,7 +115,7 @@ public void ClassificationStackingEnsembleModel_PredictProbability_Multiple() new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var learner = new ClassificationStackingEnsembleLearner(learners, @@ -138,14 +138,14 @@ public void ClassificationStackingEnsembleModel_GetVariableImportance() var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, - { "PreviousExperience_month", 1 } }; + { "PreviousExperience_month", 1 }, }; var learners = new IIndexedLearner[] { new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var learner = new ClassificationStackingEnsembleLearner(learners, @@ -179,7 +179,7 @@ public void ClassificationStackingEnsembleModel_GetRawVariableImportance() new ClassificationDecisionTreeLearner(2), new ClassificationDecisionTreeLearner(5), new ClassificationDecisionTreeLearner(7), - new ClassificationDecisionTreeLearner(9) + new ClassificationDecisionTreeLearner(9), }; var learner = new ClassificationStackingEnsembleLearner(learners, @@ -204,7 +204,7 @@ static void WriteImportances(Dictionary featureImportance) var result = "new Dictionary {"; foreach (var item in featureImportance) { - result += "{" + "\"" + item.Key + "\"" + ", " + item.Value + "}, "; + result += "{\"" + item.Key + "\", " + item.Value + "}, "; } Trace.WriteLine(result); diff --git 
a/src/SharpLearning.Ensemble.Test/Models/RegressionEnsembleModelTest.cs b/src/SharpLearning.Ensemble.Test/Models/RegressionEnsembleModelTest.cs index b554ad8f..17ca8e29 100644 --- a/src/SharpLearning.Ensemble.Test/Models/RegressionEnsembleModelTest.cs +++ b/src/SharpLearning.Ensemble.Test/Models/RegressionEnsembleModelTest.cs @@ -22,7 +22,7 @@ public void RegressionEnsembleModel_Predict_single() new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var learner = new RegressionEnsembleLearner(learners, new MeanRegressionEnsembleStrategy()); @@ -51,7 +51,7 @@ public void RegressionEnsembleModel_Predict_Multiple() new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var learner = new RegressionEnsembleLearner(learners, new MeanRegressionEnsembleStrategy()); @@ -71,14 +71,14 @@ public void RegressionEnsembleModel_GetVariableImportance() var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, - { "PreviousExperience_month", 1 } }; + { "PreviousExperience_month", 1 }, }; var learners = new IIndexedLearner[] { new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var learner = new RegressionEnsembleLearner(learners, new MeanRegressionEnsembleStrategy()); @@ -86,7 +86,7 @@ public void RegressionEnsembleModel_GetVariableImportance() var actual = sut.GetVariableImportance(featureNameToIndex); var expected = new Dictionary { { "PreviousExperience_month", 100.0 }, - { "AptitudeTestScore", 3.46067371526717 } }; + { "AptitudeTestScore", 3.46067371526717 }, }; 
Assert.AreEqual(expected.Count, actual.Count); var zip = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a }); @@ -108,7 +108,7 @@ public void RegressionEnsembleModel_GetRawVariableImportance() new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var learner = new RegressionEnsembleLearner(learners, new MeanRegressionEnsembleStrategy()); diff --git a/src/SharpLearning.Ensemble.Test/Models/RegressionStackingEnsembleModelTest.cs b/src/SharpLearning.Ensemble.Test/Models/RegressionStackingEnsembleModelTest.cs index df2a94eb..f46a0a11 100644 --- a/src/SharpLearning.Ensemble.Test/Models/RegressionStackingEnsembleModelTest.cs +++ b/src/SharpLearning.Ensemble.Test/Models/RegressionStackingEnsembleModelTest.cs @@ -26,7 +26,7 @@ public void RegressionStackingEnsembleModel_Predict_single() new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var learner = new RegressionStackingEnsembleLearner(learners, @@ -58,7 +58,7 @@ public void RegressionStackingEnsembleModel_Predict_Multiple() new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var learner = new RegressionStackingEnsembleLearner(learners, @@ -81,14 +81,14 @@ public void RegressionStackingEnsembleModel_GetVariableImportance() var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, - { "PreviousExperience_month", 1 } }; + { "PreviousExperience_month", 1 }, }; var learners = new IIndexedLearner[] { new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new 
RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var learner = new RegressionStackingEnsembleLearner(learners, @@ -120,7 +120,7 @@ public void RegressionStackingEnsembleModel_GetRawVariableImportance() new RegressionDecisionTreeLearner(2), new RegressionDecisionTreeLearner(5), new RegressionDecisionTreeLearner(7), - new RegressionDecisionTreeLearner(9) + new RegressionDecisionTreeLearner(9), }; var learner = new RegressionStackingEnsembleLearner(learners, @@ -160,7 +160,7 @@ static void WriteImportances(Dictionary featureImportance) var result = "new Dictionary {"; foreach (var item in featureImportance) { - result += "{" + "\"" + item.Key + "\"" + ", " + item.Value + "}, "; + result += "{\"" + item.Key + "\", " + item.Value + "}, "; } Trace.WriteLine(result); diff --git a/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationClassificationEnsembleSelection.cs b/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationClassificationEnsembleSelection.cs index 1976213f..7496fd55 100644 --- a/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationClassificationEnsembleSelection.cs +++ b/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationClassificationEnsembleSelection.cs @@ -40,7 +40,7 @@ public BackwardEliminationClassificationEnsembleSelection( /// /// Greedy backwards elimination of ensemble models. /// - /// cross validated predictions from multiple models. + /// cross validated predictions from multiple models. 
/// Each row in the matrix corresponds to predictions from a separate model /// Corresponding targets /// The indices of the selected model @@ -78,7 +78,7 @@ double SelectNextModelToRemove(ProbabilityPrediction[][] crossValidatedModelPred double[] targets, double currentBestError) { - var rows = crossValidatedModelPredictions.First().Length; + var rows = crossValidatedModelPredictions[0].Length; var candidateModelMatrix = new ProbabilityPrediction[m_remainingModelIndices.Count - 1][]; var candidatePredictions = new ProbabilityPrediction[rows]; var candidateModelIndices = new int[m_remainingModelIndices.Count - 1]; diff --git a/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationRegressionEnsembleSelection.cs b/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationRegressionEnsembleSelection.cs index f7288655..3db7a218 100644 --- a/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationRegressionEnsembleSelection.cs +++ b/src/SharpLearning.Ensemble/EnsembleSelectors/BackwardEliminationRegressionEnsembleSelection.cs @@ -40,7 +40,7 @@ public BackwardEliminationRegressionEnsembleSelection( /// /// Greedy backward elimination of ensemble models. /// - /// cross validated predictions from multiple models. + /// cross validated predictions from multiple models. 
/// Each column in the matrix corresponds to predictions from a separate model /// Corresponding targets /// The indices of the selected model diff --git a/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchClassificationEnsembleSelection.cs b/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchClassificationEnsembleSelection.cs index 9086d987..09c2bf87 100644 --- a/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchClassificationEnsembleSelection.cs +++ b/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchClassificationEnsembleSelection.cs @@ -27,7 +27,7 @@ public sealed class ForwardSearchClassificationEnsembleSelection : IClassificati /// Metric to minimize /// Strategy for ensembling models /// Number of models to select - /// Number of models from start of the search. + /// Number of models from start of the search. /// The top n models will be selected based in their solo performance /// If true the same model can be selected multiple times. /// This will correspond to weighting the models. If false each model can only be selected once @@ -52,7 +52,7 @@ public ForwardSearchClassificationEnsembleSelection( /// /// Greedy forward selection of ensemble models. /// - /// cross validated predictions from multiple models. + /// cross validated predictions from multiple models. 
/// Each row in the matrix corresponds to predictions from a separate model /// Corresponding targets /// The indices of the selected model @@ -104,7 +104,7 @@ double SelectNextModelToAdd(ProbabilityPrediction[][] crossValidatedModelPredict double[] targets, double currentBestError) { - var rows = crossValidatedModelPredictions.First().Length; + var rows = crossValidatedModelPredictions[0].Length; var candidateModelMatrix = new ProbabilityPrediction[m_selectedModelIndices.Count + 1][]; var candidatePredictions = new ProbabilityPrediction[rows]; var candidateModelIndices = new int[m_selectedModelIndices.Count + 1]; diff --git a/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchRegressionEnsembleSelection.cs b/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchRegressionEnsembleSelection.cs index 11e549e3..78970d9c 100644 --- a/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchRegressionEnsembleSelection.cs +++ b/src/SharpLearning.Ensemble/EnsembleSelectors/ForwardSearchRegressionEnsembleSelection.cs @@ -27,7 +27,7 @@ public sealed class ForwardSearchRegressionEnsembleSelection : IRegressionEnsemb /// Metric to minimize /// Strategy for ensembling models /// Number of models to select - /// Number of models from start of the search. + /// Number of models from start of the search. /// The top n models will be selected based in their solo performance /// If true the same model can be selected multiple times. /// This will correspond to weighting the models. If false each model can only be selected once @@ -48,7 +48,7 @@ public ForwardSearchRegressionEnsembleSelection(IMetric metric, /// /// Greedy forward selection of ensemble models. /// - /// cross validated predictions from multiple models. + /// cross validated predictions from multiple models. 
/// Each column in the matrix corresponds to predictions from a separate model /// Corresponding targets /// The indices of the selected model diff --git a/src/SharpLearning.Ensemble/EnsembleSelectors/RandomClassificationEnsembleSelection.cs b/src/SharpLearning.Ensemble/EnsembleSelectors/RandomClassificationEnsembleSelection.cs index c5c8baf9..7823702b 100644 --- a/src/SharpLearning.Ensemble/EnsembleSelectors/RandomClassificationEnsembleSelection.cs +++ b/src/SharpLearning.Ensemble/EnsembleSelectors/RandomClassificationEnsembleSelection.cs @@ -54,7 +54,7 @@ public RandomClassificationEnsembleSelection( /// /// Greedy forward selection of ensemble models. /// - /// cross validated predictions from multiple models. + /// cross validated predictions from multiple models. /// Each row in the matrix corresponds to predictions from a separate model /// Corresponding targets /// The indices of the selected model @@ -68,7 +68,7 @@ public int[] Select(ProbabilityPrediction[][] crossValidatedModelPredictions, do m_allIndices = Enumerable.Range(0, crossValidatedModelPredictions.Length).ToArray(); - var rows = crossValidatedModelPredictions.First().Length; + var rows = crossValidatedModelPredictions[0].Length; var candidateModelMatrix = new ProbabilityPrediction[m_numberOfModelsToSelect][]; var candidatePredictions = new ProbabilityPrediction[rows]; var candidateModelIndices = new int[m_numberOfModelsToSelect]; diff --git a/src/SharpLearning.Ensemble/EnsembleSelectors/RandomRegressionEnsembleSelection.cs b/src/SharpLearning.Ensemble/EnsembleSelectors/RandomRegressionEnsembleSelection.cs index 87c3d834..f507a4da 100644 --- a/src/SharpLearning.Ensemble/EnsembleSelectors/RandomRegressionEnsembleSelection.cs +++ b/src/SharpLearning.Ensemble/EnsembleSelectors/RandomRegressionEnsembleSelection.cs @@ -54,7 +54,7 @@ public RandomRegressionEnsembleSelection( /// /// Iterative random selection of ensemble models. /// - /// cross validated predictions from multiple models. 
+ /// cross validated predictions from multiple models. /// Each column in the matrix corresponds to predictions from a separate model /// Corresponding targets /// The indices of the selected model diff --git a/src/SharpLearning.Ensemble/Learners/ClassificationBackwardEliminationModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/ClassificationBackwardEliminationModelSelectingEnsembleLearner.cs index 652bdf8b..2cb342e3 100644 --- a/src/SharpLearning.Ensemble/Learners/ClassificationBackwardEliminationModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/ClassificationBackwardEliminationModelSelectingEnsembleLearner.cs @@ -8,17 +8,17 @@ namespace SharpLearning.Ensemble.Learners; /// -/// Classification model selecting EnsembleLearner. +/// Classification model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using greedy backward elimination. -/// The selection of the best set of models is based on cross validation. +/// The selection of the best set of models is based on cross validation. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// public sealed class ClassificationBackwardEliminationModelSelectingEnsembleLearner : ClassificationModelSelectingEnsembleLearner { /// - /// Classification model selecting EnsembleLearner. + /// Classification model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using greedy backward elimination. - /// The selection of the best set of models is based on cross validation. + /// The selection of the best set of models is based on cross validation. /// Default is 5-fold StratifiedCrossValidation and minimization of multi-class log loss and mean of probabilities is used to combine the models. 
/// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// @@ -35,9 +35,9 @@ public ClassificationBackwardEliminationModelSelectingEnsembleLearner( } /// - /// Classification model selecting EnsembleLearner. + /// Classification model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using greedy backward elimination. - /// The selection of the best set of models is based on cross validation. + /// The selection of the best set of models is based on cross validation. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// /// Learners in the ensemble diff --git a/src/SharpLearning.Ensemble/Learners/ClassificationEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/ClassificationEnsembleLearner.cs index da7098aa..07eefa3d 100644 --- a/src/SharpLearning.Ensemble/Learners/ClassificationEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/ClassificationEnsembleLearner.cs @@ -29,7 +29,7 @@ public sealed class ClassificationEnsembleLearner /// Default combination method is mean of the probabilities of the models. /// /// Learners in the ensemble - /// Default is 1.0. All models are trained on all data. + /// Default is 1.0. All models are trained on all data. /// If different from 1.0 models are trained using bagging with the chosen sub sample ratio /// Seed for the bagging when used public ClassificationEnsembleLearner( @@ -46,7 +46,7 @@ public ClassificationEnsembleLearner( /// /// Learners in the ensemble /// Strategy on how to combine the models - /// Default is 1.0. All models are trained on all data. + /// Default is 1.0. All models are trained on all data. 
/// If different from 1.0 models are trained using bagging with the chosen sub sample ratio /// Seed for the bagging when used public ClassificationEnsembleLearner( @@ -64,7 +64,7 @@ public ClassificationEnsembleLearner( /// /// Learners in the ensemble /// Strategy on how to combine the models - /// Default is 1.0. All models are trained on all data. + /// Default is 1.0. All models are trained on all data. /// If different from 1.0 models are trained using bagging with the chosen sub sample ratio /// Seed for the bagging when used public ClassificationEnsembleLearner( diff --git a/src/SharpLearning.Ensemble/Learners/ClassificationForwardSearchModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/ClassificationForwardSearchModelSelectingEnsembleLearner.cs index a462fa57..9aa32e32 100644 --- a/src/SharpLearning.Ensemble/Learners/ClassificationForwardSearchModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/ClassificationForwardSearchModelSelectingEnsembleLearner.cs @@ -8,17 +8,17 @@ namespace SharpLearning.Ensemble.Learners; /// -/// Classification model selecting EnsembleLearner. +/// Classification model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using greedy forward selection. -/// The selection of the best set of models is based on cross validation. +/// The selection of the best set of models is based on cross validation. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// public sealed class ClassificationForwardSearchModelSelectingEnsembleLearner : ClassificationModelSelectingEnsembleLearner { /// - /// Classification model selecting EnsembleLearner. + /// Classification model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using greedy forward selection. - /// The selection of the best set of models is based on cross validation. 
+ /// The selection of the best set of models is based on cross validation. /// Default is 5-fold StratifiedCrossValidation and minimization of multi-class log loss and mean of probabilities is used to combine the models. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// @@ -35,9 +35,9 @@ public ClassificationForwardSearchModelSelectingEnsembleLearner( } /// - /// Classification model selecting EnsembleLearner. + /// Classification model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using greedy forward selection. - /// The selection of the best set of models is based on cross validation. + /// The selection of the best set of models is based on cross validation. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// /// Learners in the ensemble @@ -45,7 +45,7 @@ public ClassificationForwardSearchModelSelectingEnsembleLearner( /// Cross validation method /// Strategy for ensembling models /// Metric to minimize - /// Number of models from start of the search. + /// Number of models from start of the search. /// The top n models will be selected based in their solo performance /// If true the same model can be selected multiple times. /// This will correspond to weighting the models. If false each model can only be selected once. Default is true diff --git a/src/SharpLearning.Ensemble/Learners/ClassificationModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/ClassificationModelSelectingEnsembleLearner.cs index eb6e87c1..4d724a0a 100644 --- a/src/SharpLearning.Ensemble/Learners/ClassificationModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/ClassificationModelSelectingEnsembleLearner.cs @@ -28,7 +28,7 @@ public class ClassificationModelSelectingEnsembleLearner readonly IClassificationEnsembleSelection m_ensembleSelection; /// - /// Classification model selecting EnsembleLearner. 
+ /// Classification model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble. /// The selection of the best set of models is based on cross validation. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf @@ -47,7 +47,7 @@ public ClassificationModelSelectingEnsembleLearner( } /// - /// Classification model selecting EnsembleLearner. + /// Classification model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble. /// The selection of the best set of models is based on cross validation. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf diff --git a/src/SharpLearning.Ensemble/Learners/ClassificationRandomModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/ClassificationRandomModelSelectingEnsembleLearner.cs index 9a681da7..839702b4 100644 --- a/src/SharpLearning.Ensemble/Learners/ClassificationRandomModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/ClassificationRandomModelSelectingEnsembleLearner.cs @@ -8,23 +8,23 @@ namespace SharpLearning.Ensemble.Learners; /// -/// Classification model selecting EnsembleLearner. +/// Classification model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using iterative random selection. -/// The selection of the best set of models is based on cross validation. +/// The selection of the best set of models is based on cross validation. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// public sealed class ClassificationRandomModelSelectingEnsembleLearner : ClassificationModelSelectingEnsembleLearner { /// - /// Classification model selecting EnsembleLearner. + /// Classification model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using iterative random selection. 
- /// The selection of the best set of models is based on cross validation. + /// The selection of the best set of models is based on cross validation. /// Default is 5-fold StratifiedCrossValidation and minimization of mean square error and mean is used to combine the models. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// /// Learners in the ensemble /// Number of models to select - /// Number of iterations to random select model combinations. + /// Number of iterations to random select model combinations. public ClassificationRandomModelSelectingEnsembleLearner( IIndexedLearner[] learners, int numberOfModelsToSelect, @@ -37,9 +37,9 @@ public ClassificationRandomModelSelectingEnsembleLearner( } /// - /// Classification model selecting EnsembleLearner. + /// Classification model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using iterative random selection. - /// The selection of the best set of models is based on cross validation. + /// The selection of the best set of models is based on cross validation. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// /// Learners in the ensemble @@ -47,7 +47,7 @@ public ClassificationRandomModelSelectingEnsembleLearner( /// Cross validation method /// Strategy for ensembling models /// Metric to minimize - /// Number of iterations to random select model combinations. + /// Number of iterations to random select model combinations. /// If true the same model can be selected multiple times. 
/// public ClassificationRandomModelSelectingEnsembleLearner( diff --git a/src/SharpLearning.Ensemble/Learners/ClassificationStackingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/ClassificationStackingEnsembleLearner.cs index 814ff379..d853f593 100644 --- a/src/SharpLearning.Ensemble/Learners/ClassificationStackingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/ClassificationStackingEnsembleLearner.cs @@ -26,14 +26,14 @@ public sealed class ClassificationStackingEnsembleLearner readonly bool m_includeOriginalFeaturesForMetaLearner; /// - /// Stacking Classification Ensemble Learner. + /// Stacking Classification Ensemble Learner. /// Combines several models into a single ensemble model using a top or meta level model to combine the models. /// The bottom level models generates output for the top level model using cross validation. /// Default is 5-fold StratifiedCrossValidation. /// /// Learners in the ensemble /// Meta learner or top level model for combining the ensemble models - /// True; the meta learner also receives the original features. + /// True; the meta learner also receives the original features. /// False; the meta learner only receives the output of the ensemble models as features. Default is true public ClassificationStackingEnsembleLearner( IIndexedLearner[] learners, @@ -45,16 +45,15 @@ public ClassificationStackingEnsembleLearner( { } - /// - /// Stacking Classification Ensemble Learner. + /// Stacking Classification Ensemble Learner. /// Combines several models into a single ensemble model using a top or meta level model to combine the models. /// The bottom level models generates output for the top level model using cross validation. /// /// Learners in the ensemble /// Meta learner or top level model for combining the ensemble models /// Cross validation method - /// True; the meta learner also receives the original features. + /// True; the meta learner also receives the original features. 
/// False; the meta learner only receives the output of the ensemble models as features. Default is true public ClassificationStackingEnsembleLearner( IIndexedLearner[] learners, @@ -67,14 +66,14 @@ public ClassificationStackingEnsembleLearner( } /// - /// Stacking Classification Ensemble Learner. + /// Stacking Classification Ensemble Learner. /// Combines several models into a single ensemble model using a top or meta level model to combine the models. /// Combines several models into a single ensemble model using a top or meta level model to combine the models. /// /// Learners in the ensemble /// Meta learner or top level model for combining the ensemble models /// Cross validation method - /// True; the meta learner also receives the original features. + /// True; the meta learner also receives the original features. /// False; the meta learner only receives the output of the ensemble models as features public ClassificationStackingEnsembleLearner( IIndexedLearner[] learners, diff --git a/src/SharpLearning.Ensemble/Learners/RegressionBackwardEliminationModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/RegressionBackwardEliminationModelSelectingEnsembleLearner.cs index 370608bc..be6febec 100644 --- a/src/SharpLearning.Ensemble/Learners/RegressionBackwardEliminationModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/RegressionBackwardEliminationModelSelectingEnsembleLearner.cs @@ -7,7 +7,7 @@ namespace SharpLearning.Ensemble.Learners; /// -/// Regression model selecting EnsembleLearner. +/// Regression model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using backwards elimination. /// The selection of the best set of models is based on cross validation. 
/// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf @@ -15,9 +15,9 @@ namespace SharpLearning.Ensemble.Learners; public sealed class RegressionBackwardEliminationModelSelectingEnsembleLearner : RegressionModelSelectingEnsembleLearner { /// - /// Regression model selecting EnsembleLearner. + /// Regression model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using backwards elimination. - /// The selection of the best set of models is based on cross validation. + /// The selection of the best set of models is based on cross validation. /// Default is 5-fold RandomCrossValidation and minimization of mean square error and mean is used to combine the models. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// @@ -34,9 +34,9 @@ public RegressionBackwardEliminationModelSelectingEnsembleLearner( } /// - /// Regression model selecting EnsembleLearner. + /// Regression model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using backwards elimination. - /// The selection of the best set of models is based on cross validation. + /// The selection of the best set of models is based on cross validation. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// /// Learners in the ensemble diff --git a/src/SharpLearning.Ensemble/Learners/RegressionEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/RegressionEnsembleLearner.cs index e1ce40ec..19bfce83 100644 --- a/src/SharpLearning.Ensemble/Learners/RegressionEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/RegressionEnsembleLearner.cs @@ -25,7 +25,7 @@ public sealed class RegressionEnsembleLearner : ILearner, IIndexedLearne /// Default combination method is the mean of all model outputs. /// /// Learners in the ensemble - /// Default is 1.0. All models are trained on all data. + /// Default is 1.0. 
All models are trained on all data. /// If different from 1.0 models are trained using bagging with the chosen sub sample ratio /// Seed for the bagging when used public RegressionEnsembleLearner( @@ -42,7 +42,7 @@ public RegressionEnsembleLearner( /// /// Learners in the ensemble /// Strategy on how to combine the models. Default is mean of all models in the ensemble - /// Default is 1.0. All models are trained on all data. + /// Default is 1.0. All models are trained on all data. /// If different from 1.0 models are trained using bagging with the chosen sub sample ratio /// Seed for the bagging when used public RegressionEnsembleLearner( @@ -60,7 +60,7 @@ public RegressionEnsembleLearner( /// /// Learners in the ensemble /// Strategy on how to combine the models - /// Default is 1.0. All models are trained on all data. + /// Default is 1.0. All models are trained on all data. /// If different from 1.0 models are trained using bagging with the chosen sub sample ratio /// Seed for the bagging when used public RegressionEnsembleLearner( @@ -69,9 +69,9 @@ public RegressionEnsembleLearner( double subSampleRatio = 1.0, int seed = 24) { - m_learners = learners ?? throw new ArgumentNullException("learners"); + m_learners = learners ?? throw new ArgumentNullException(nameof(learners)); if (learners.Length < 1) { throw new ArgumentException("there must be at least 1 learner"); } - m_ensembleStrategy = ensembleStrategy ?? throw new ArgumentNullException("ensembleStrategy"); + m_ensembleStrategy = ensembleStrategy ?? 
throw new ArgumentNullException(nameof(ensembleStrategy)); m_random = new Random(seed); m_subSampleRatio = subSampleRatio; @@ -127,7 +127,6 @@ public RegressionEnsembleModel Learn(F64Matrix observations, double[] targets, return new RegressionEnsembleModel(ensembleModels, m_ensembleStrategy()); } - void Sample(int[] inSample, int[] allIndices) { for (var i = 0; i < inSample.Length; i++) diff --git a/src/SharpLearning.Ensemble/Learners/RegressionForwardSearchModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/RegressionForwardSearchModelSelectingEnsembleLearner.cs index 58540f11..a56d2824 100644 --- a/src/SharpLearning.Ensemble/Learners/RegressionForwardSearchModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/RegressionForwardSearchModelSelectingEnsembleLearner.cs @@ -7,7 +7,7 @@ namespace SharpLearning.Ensemble.Learners; /// -/// Regression model selecting EnsembleLearner. +/// Regression model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using greedy forward selection. /// The selection of the best set of models is based on cross validation. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf @@ -15,9 +15,9 @@ namespace SharpLearning.Ensemble.Learners; public sealed class RegressionForwardSearchModelSelectingEnsembleLearner : RegressionModelSelectingEnsembleLearner { /// - /// Regression model selecting EnsembleLearner. + /// Regression model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using greedy forward selection. - /// The selection of the best set of models is based on cross validation. + /// The selection of the best set of models is based on cross validation. /// Default is 5-fold RandomCrossValidation and minimization of mean square error and mean is used to combine the models. 
/// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// @@ -34,9 +34,9 @@ public RegressionForwardSearchModelSelectingEnsembleLearner( } /// - /// Regression model selecting EnsembleLearner. + /// Regression model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using greedy forward selection. - /// The selection of the best set of models is based on cross validation. + /// The selection of the best set of models is based on cross validation. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// /// Learners in the ensemble @@ -44,7 +44,7 @@ public RegressionForwardSearchModelSelectingEnsembleLearner( /// Cross validation method /// Strategy for ensembling models /// Metric to minimize - /// Number of models from start of the search. + /// Number of models from start of the search. /// The top n models will be selected based in their solo performance /// If true the same model can be selected multiple times. /// This will correspond to weighting the models. If false each model can only be selected once. Default is true diff --git a/src/SharpLearning.Ensemble/Learners/RegressionModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/RegressionModelSelectingEnsembleLearner.cs index 117fd727..250a7718 100644 --- a/src/SharpLearning.Ensemble/Learners/RegressionModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/RegressionModelSelectingEnsembleLearner.cs @@ -24,7 +24,7 @@ public class RegressionModelSelectingEnsembleLearner : ILearner, IIndexe readonly IRegressionEnsembleSelection m_ensembleSelection; /// - /// Regression model selecting EnsembleLearner. + /// Regression model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble. /// The selection of the best set of models is based on cross validation. 
/// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf @@ -43,7 +43,7 @@ public RegressionModelSelectingEnsembleLearner( } /// - /// Regression model selecting EnsembleLearner. + /// Regression model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble. /// The selection of the best set of models is based on cross validation. /// Trains several models and selects the best subset of models for the ensemble. diff --git a/src/SharpLearning.Ensemble/Learners/RegressionRandomModelSelectingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/RegressionRandomModelSelectingEnsembleLearner.cs index 0856fa58..7656e930 100644 --- a/src/SharpLearning.Ensemble/Learners/RegressionRandomModelSelectingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/RegressionRandomModelSelectingEnsembleLearner.cs @@ -7,7 +7,7 @@ namespace SharpLearning.Ensemble.Learners; /// -/// Regression model selecting EnsembleLearner. +/// Regression model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using iterative random selection. /// The selection of the best set of models is based on cross validation. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf @@ -15,15 +15,15 @@ namespace SharpLearning.Ensemble.Learners; public sealed class RegressionRandomModelSelectingEnsembleLearner : RegressionModelSelectingEnsembleLearner { /// - /// Regression model selecting EnsembleLearner. + /// Regression model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using iterative random selection. - /// The selection of the best set of models is based on cross validation. + /// The selection of the best set of models is based on cross validation. /// Default is 5-fold RandomCrossValidation and minimization of mean square error and mean is used to combine the models. 
/// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// /// Learners in the ensemble /// Number of models to select - /// Number of iterations to random select model combinations. + /// Number of iterations to random select model combinations. public RegressionRandomModelSelectingEnsembleLearner( IIndexedLearner[] learners, int numberOfModelsToSelect, @@ -35,11 +35,10 @@ public RegressionRandomModelSelectingEnsembleLearner( { } - /// - /// Regression model selecting EnsembleLearner. + /// Regression model selecting EnsembleLearner. /// Trains several models and selects the best subset of models for the ensemble using iterative random selection. - /// The selection of the best set of models is based on cross validation. + /// The selection of the best set of models is based on cross validation. /// http://www.cs.cornell.edu/~alexn/papers/shotgun.icml04.revised.rev2.pdf /// /// Learners in the ensemble @@ -47,7 +46,7 @@ public RegressionRandomModelSelectingEnsembleLearner( /// Cross validation method /// Strategy for ensembling models /// Metric to minimize - /// Number of iterations to random select model combinations. + /// Number of iterations to random select model combinations. /// If true the same model can be selected multiple times. /// public RegressionRandomModelSelectingEnsembleLearner( diff --git a/src/SharpLearning.Ensemble/Learners/RegressionStackingEnsembleLearner.cs b/src/SharpLearning.Ensemble/Learners/RegressionStackingEnsembleLearner.cs index eb15e5dc..c482761e 100644 --- a/src/SharpLearning.Ensemble/Learners/RegressionStackingEnsembleLearner.cs +++ b/src/SharpLearning.Ensemble/Learners/RegressionStackingEnsembleLearner.cs @@ -22,14 +22,14 @@ public sealed class RegressionStackingEnsembleLearner : ILearner, IIndex readonly bool m_includeOriginalFeaturesForMetaLearner; /// - /// Stacking Regression Ensemble Learner. + /// Stacking Regression Ensemble Learner. 
/// Combines several models into a single ensemble model using a top or meta level model to combine the models. /// The bottom level models generates output for the top level model using cross validation. /// Default is 5-fold RandomCrossValidation. /// /// Learners in the ensemble /// Meta learner or top level model for combining the ensemble models - /// True; the meta learner also receives the original features. + /// True; the meta learner also receives the original features. /// False; the meta learner only receives the output of the ensemble models as features. Default is true public RegressionStackingEnsembleLearner( IIndexedLearner[] learners, @@ -41,14 +41,14 @@ public RegressionStackingEnsembleLearner( } /// - /// Stacking Regression Ensemble Learner. + /// Stacking Regression Ensemble Learner. /// Combines several models into a single ensemble model using a top or meta level model to combine the models. /// The bottom level models generates output for the top level model using cross validation. /// /// Learners in the ensemble /// Meta learner or top level model for combining the ensemble models /// Cross validation method - /// True; the meta learner also receives the original features. + /// True; the meta learner also receives the original features. /// False; the meta learner only receives the output of the ensemble models as features. Default is true public RegressionStackingEnsembleLearner( IIndexedLearner[] learners, @@ -61,14 +61,14 @@ public RegressionStackingEnsembleLearner( } /// - /// Stacking Regression Ensemble Learner. + /// Stacking Regression Ensemble Learner. /// Combines several models into a single ensemble model using a top or meta level model to combine the models. /// The bottom level models generates output for the top level model using cross validation. 
/// /// Learners in the ensemble /// Meta learner or top level model for combining the ensemble models /// Cross validation method - /// True; the meta learner also receives the original features. + /// True; the meta learner also receives the original features. /// False; the meta learner only receives the output of the ensemble models as features public RegressionStackingEnsembleLearner( IIndexedLearner[] learners, diff --git a/src/SharpLearning.Ensemble/Models/ClassificationEnsembleModel.cs b/src/SharpLearning.Ensemble/Models/ClassificationEnsembleModel.cs index 078b02bc..7c1bdb31 100644 --- a/src/SharpLearning.Ensemble/Models/ClassificationEnsembleModel.cs +++ b/src/SharpLearning.Ensemble/Models/ClassificationEnsembleModel.cs @@ -30,7 +30,7 @@ public ClassificationEnsembleModel(IPredictorModel[] ense } /// - /// + /// /// /// /// @@ -40,7 +40,7 @@ public double Predict(double[] observation) } /// - /// + /// /// /// /// @@ -102,8 +102,7 @@ public double[] GetRawVariableImportance() // return normalized variable importance. 
// Individual models can have very different scaling of importances var index = 0; - var dummyFeatureNameToIndex = m_ensembleModels - .First().GetRawVariableImportance() + var dummyFeatureNameToIndex = m_ensembleModels[0].GetRawVariableImportance() .ToDictionary(k => index.ToString(), k => index++); return GetVariableImportance(dummyFeatureNameToIndex).Values.ToArray(); diff --git a/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs b/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs index 50ab2c00..af895364 100644 --- a/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs +++ b/src/SharpLearning.Ensemble/Models/ClassificationStackingEnsembleModel.cs @@ -19,11 +19,11 @@ public class ClassificationStackingEnsembleModel : IPredictorModel, IPre readonly int m_numberOfClasses; /// - /// + /// /// /// Models included in the ensemble /// Meta or top level model to combine the ensemble models - /// True; the meta learner also receives the original features. + /// True; the meta learner also receives the original features. 
/// False; the meta learner only receives the output of the ensemble models as features /// Number of classes in the classification problem public ClassificationStackingEnsembleModel(IPredictorModel[] ensembleModels, @@ -147,7 +147,7 @@ public Dictionary GetVariableImportance(Dictionary } else { - duplicateModelCount[name] += 1; + duplicateModelCount[name]++; } name += "_" + duplicateModelCount[name].ToString(); diff --git a/src/SharpLearning.Ensemble/Models/RegressionEnsembleModel.cs b/src/SharpLearning.Ensemble/Models/RegressionEnsembleModel.cs index 58855ed0..11696d10 100644 --- a/src/SharpLearning.Ensemble/Models/RegressionEnsembleModel.cs +++ b/src/SharpLearning.Ensemble/Models/RegressionEnsembleModel.cs @@ -29,7 +29,7 @@ public RegressionEnsembleModel(IPredictorModel[] ensembleModels, } /// - /// + /// /// /// /// @@ -47,7 +47,7 @@ public double Predict(double[] observation) } /// - /// + /// /// /// /// @@ -73,15 +73,14 @@ public double[] GetRawVariableImportance() // return normalized variable importance. 
// Individual models can have very different scaling of importances var index = 0; - var dummyFeatureNameToIndex = m_ensembleModels - .First().GetRawVariableImportance() + var dummyFeatureNameToIndex = m_ensembleModels[0].GetRawVariableImportance() .ToDictionary(k => index.ToString(), k => index++); return GetVariableImportance(dummyFeatureNameToIndex).Values.ToArray(); } /// - /// Returns the rescaled (0-100) and sorted variable importance scores with corresponding name + /// Returns the rescaled (0-100) and sorted variable importance scores with corresponding name /// /// /// diff --git a/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs b/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs index 100f883c..9eadf0c0 100644 --- a/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs +++ b/src/SharpLearning.Ensemble/Models/RegressionStackingEnsembleModel.cs @@ -15,11 +15,11 @@ public class RegressionStackingEnsembleModel : IPredictorModel readonly bool m_includeOriginalFeaturesForMetaLearner; /// - /// + /// /// /// Models included in the ensemble /// Meta or top level model to combine the ensemble models - /// True; the meta learner also receives the original features. + /// True; the meta learner also receives the original features. 
/// False; the meta learner only receives the output of the ensemble models as features public RegressionStackingEnsembleModel(IPredictorModel[] ensembleModels, IPredictorModel metaModel, bool includeOriginalFeaturesForMetaLearner) { @@ -100,7 +100,7 @@ public Dictionary GetVariableImportance(Dictionary } else { - duplicateModelCount[name] += 1; + duplicateModelCount[name]++; } ensembleFeatureNameToIndex.Add(name + "_" + duplicateModelCount[name].ToString(), index++); diff --git a/src/SharpLearning.Ensemble/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategy.cs b/src/SharpLearning.Ensemble/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategy.cs index 67fde530..0911d4bd 100644 --- a/src/SharpLearning.Ensemble/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategy.cs +++ b/src/SharpLearning.Ensemble/Strategies/GeometricMeanProbabilityClassificationEnsembleStrategy.cs @@ -61,7 +61,6 @@ static double GeometricMean(double[] values) else { geoMean *= values[i]; - } } diff --git a/src/SharpLearning.Ensemble/Strategies/IClassificationEnsembleStrategy.cs b/src/SharpLearning.Ensemble/Strategies/IClassificationEnsembleStrategy.cs index 2fa90891..91198e6e 100644 --- a/src/SharpLearning.Ensemble/Strategies/IClassificationEnsembleStrategy.cs +++ b/src/SharpLearning.Ensemble/Strategies/IClassificationEnsembleStrategy.cs @@ -8,14 +8,14 @@ namespace SharpLearning.Ensemble.Strategies; public interface IClassificationEnsembleStrategy { /// - /// + /// /// /// /// ProbabilityPrediction Combine(ProbabilityPrediction[] ensemblePredictions); /// - /// + /// /// /// /// diff --git a/src/SharpLearning.Ensemble/Strategies/IRegressionEnsembleStrategy.cs b/src/SharpLearning.Ensemble/Strategies/IRegressionEnsembleStrategy.cs index 2117d7a2..7e6295cf 100644 --- a/src/SharpLearning.Ensemble/Strategies/IRegressionEnsembleStrategy.cs +++ b/src/SharpLearning.Ensemble/Strategies/IRegressionEnsembleStrategy.cs @@ -8,14 +8,14 @@ namespace 
SharpLearning.Ensemble.Strategies; public interface IRegressionEnsembleStrategy { /// - /// + /// /// /// /// double Combine(double[] ensemblePredictions); /// - /// + /// /// /// /// diff --git a/src/SharpLearning.Ensemble/Strategies/MeanRegressionEnsembleStrategy.cs b/src/SharpLearning.Ensemble/Strategies/MeanRegressionEnsembleStrategy.cs index 3b993404..34277816 100644 --- a/src/SharpLearning.Ensemble/Strategies/MeanRegressionEnsembleStrategy.cs +++ b/src/SharpLearning.Ensemble/Strategies/MeanRegressionEnsembleStrategy.cs @@ -11,7 +11,7 @@ namespace SharpLearning.Ensemble.Strategies; public sealed class MeanRegressionEnsembleStrategy : IRegressionEnsembleStrategy { /// - /// + /// /// /// /// @@ -21,7 +21,7 @@ public double Combine(double[] ensemblePredictions) } /// - /// + /// /// /// /// diff --git a/src/SharpLearning.FeatureTransformations.Test/FeatureTransformationExtensionsTest.cs b/src/SharpLearning.FeatureTransformations.Test/FeatureTransformationExtensionsTest.cs index 0bfc0337..b9e85b75 100644 --- a/src/SharpLearning.FeatureTransformations.Test/FeatureTransformationExtensionsTest.cs +++ b/src/SharpLearning.FeatureTransformations.Test/FeatureTransformationExtensionsTest.cs @@ -51,7 +51,6 @@ public void FeatureTransformationExtensions_MatrixTransform() Assert.AreEqual(expected, actual); } - [TestMethod] public void FeatureTransformationExtensions_RowTransform() { diff --git a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs index fa70fa2e..f581e01f 100644 --- a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs +++ b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/DateTimeFeatureTransformer.cs @@ -41,18 +41,11 @@ public DateTimeFeatureTransformer(string dateTimeColumn) /// public DateTimeFeatureTransformer(string dateTimeColumn, DateTime startDate) { - if (startDate == null) { 
throw new ArgumentNullException("startDate"); } m_dateTimeColumn = dateTimeColumn; m_startDate = startDate; } - /// - /// - /// - public static string[] FeatureNames - { - get => ["Year", "Month", "WeekOfYear", "DayOfMonth", "DayOfWeek", "HourOfDay", "TotalDays", "TotalHours"]; - } + public static string[] FeatureNames => ["Year", "Month", "WeekOfYear", "DayOfMonth", "DayOfWeek", "HourOfDay", "TotalDays", "TotalHours"]; /// /// Transform a data string into numerical features that can be presented to a machine learning algorithm. @@ -65,7 +58,7 @@ public static string[] FeatureNames public IEnumerable Transform(IEnumerable rows) { var rowsList = rows.ToList(); - var newColumnNameToIndex = rowsList.First().ColumnNameToIndex.ToDictionary(v => v.Key, v => v.Value); + var newColumnNameToIndex = rowsList[0].ColumnNameToIndex.ToDictionary(v => v.Key, v => v.Value); var index = newColumnNameToIndex.Count; foreach (var name in FeatureNames) @@ -112,7 +105,6 @@ string[] CreateDateTimeFeatures(DateTime dateTime) FloatingPointConversion.ToString(hours), FloatingPointConversion.ToString(totalDays), FloatingPointConversion.ToString(totalhours), - }; return timeValues; } diff --git a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/MapCategoricalFeaturesTransformer.cs b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/MapCategoricalFeaturesTransformer.cs index 638c987e..722c73e4 100644 --- a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/MapCategoricalFeaturesTransformer.cs +++ b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/MapCategoricalFeaturesTransformer.cs @@ -5,7 +5,7 @@ namespace SharpLearning.FeatureTransformations.CsvRowTransforms; /// -/// Maps categorical features to forth running integer values. +/// Maps categorical features to forth running integer values. /// This is usefull for transforming features containing strings into numerical categories. 
/// For example: [monday, tuesday] -> [0, 1] /// This is needed when the features are used with machine learning algorithms @@ -18,7 +18,7 @@ public sealed class MapCategoricalFeaturesTransformer : ICsvRowTransformer readonly string[] m_columnsToMap; /// - /// + /// /// public MapCategoricalFeaturesTransformer(params string[] columnsToMap) { @@ -28,7 +28,7 @@ public MapCategoricalFeaturesTransformer(params string[] columnsToMap) } /// - /// Maps categorical features to forth running integer values. + /// Maps categorical features to forth running integer values. /// This is usefull for transforming features containing strings into numerical categories. /// For example: [monday, tuesday] -> [0, 1] /// This is needed when the features are used with machine learning algorithms diff --git a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/OneHotTransformer.cs b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/OneHotTransformer.cs index 2506fd30..86f0c17d 100644 --- a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/OneHotTransformer.cs +++ b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/OneHotTransformer.cs @@ -58,7 +58,7 @@ public IEnumerable Transform(IEnumerable rows) // add encoded features var newColumnNameToIndex = NewColumnNameToIndex(rows); - var additionalFeatures = m_featureMap.Select(v => v.Value.Count).Sum(); + var additionalFeatures = m_featureMap.Sum(v => v.Value.Count); foreach (var row in rows) { diff --git a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/ReplaceMissingValuesTransformer.cs b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/ReplaceMissingValuesTransformer.cs index 08d59932..68473a70 100644 --- a/src/SharpLearning.FeatureTransformations/CsvRowTransforms/ReplaceMissingValuesTransformer.cs +++ b/src/SharpLearning.FeatureTransformations/CsvRowTransforms/ReplaceMissingValuesTransformer.cs @@ -6,7 +6,7 @@ namespace SharpLearning.FeatureTransformations.CsvRowTransforms; /// -/// 
Replaces missing values identified with the missing values identifiers. +/// Replaces missing values identified with the missing values identifiers. /// The missing values are replaced by the provided replacement value /// [Serializable] @@ -16,7 +16,7 @@ public sealed class ReplaceMissingValuesTransformer : ICsvRowTransformer readonly string m_replacementValue; /// - /// Replaces missing values identified with the missing values identifiers. + /// Replaces missing values identified with the missing values identifiers. /// The missing values are replaced by the provided replacement value /// /// @@ -30,7 +30,7 @@ public ReplaceMissingValuesTransformer(string replacementValue, params string[] } /// - /// Replaces missing values identified with the missing values identifiers. + /// Replaces missing values identified with the missing values identifiers. /// The missing values are replaced by the provided replacement value /// /// diff --git a/src/SharpLearning.FeatureTransformations/Normalization/LinearNormalizer.cs b/src/SharpLearning.FeatureTransformations/Normalization/LinearNormalizer.cs index 70f29a50..7f341646 100644 --- a/src/SharpLearning.FeatureTransformations/Normalization/LinearNormalizer.cs +++ b/src/SharpLearning.FeatureTransformations/Normalization/LinearNormalizer.cs @@ -19,17 +19,15 @@ public static double Normalize(double newMin, double newMax, double oldMin, doub { if (value == oldMin) { - value = newMin; + return newMin; } else if (value == oldMax) { - value = newMax; + return newMax; } else { - value = newMin + (newMax - newMin) * (value - oldMin) / (oldMax - oldMin); + return newMin + (newMax - newMin) * (value - oldMin) / (oldMax - oldMin); } - - return value; } } diff --git a/src/SharpLearning.GradientBoost.Test/DataSetUtilities.cs b/src/SharpLearning.GradientBoost.Test/DataSetUtilities.cs index 095ebad0..1e452709 100644 --- a/src/SharpLearning.GradientBoost.Test/DataSetUtilities.cs +++ b/src/SharpLearning.GradientBoost.Test/DataSetUtilities.cs 
@@ -478,5 +478,4 @@ public static (F64Matrix observations, double[] targets) LoadDecisionTreeDataSet 1.52065;14.36;0;2.02;73.42;0;8.44;1.64;0;7 1.51651;14.38;0;1.94;73.61;0;8.48;1.57;0;7 1.51711;14.23;0;2.08;73.36;0;8.62;1.67;0;7"; - } diff --git a/src/SharpLearning.GradientBoost.Test/Learners/ClassificationBinomialGradientBoostLearnerTest.cs b/src/SharpLearning.GradientBoost.Test/Learners/ClassificationBinomialGradientBoostLearnerTest.cs index e39590a4..cee040b7 100644 --- a/src/SharpLearning.GradientBoost.Test/Learners/ClassificationBinomialGradientBoostLearnerTest.cs +++ b/src/SharpLearning.GradientBoost.Test/Learners/ClassificationBinomialGradientBoostLearnerTest.cs @@ -120,7 +120,6 @@ public void ClassificationBinomialGradientBoostLearner_MultiClass_FeaturesPrSpli Assert.AreEqual(0.0514018691588785, actual); } - [TestMethod] public void ClassificationBinomialGradientBoostLearner_MultiClass_Learn_Indexed() { diff --git a/src/SharpLearning.GradientBoost.Test/Learners/ClassificationGradientBoostLearnerTest.cs b/src/SharpLearning.GradientBoost.Test/Learners/ClassificationGradientBoostLearnerTest.cs index 7d521184..f4d287bc 100644 --- a/src/SharpLearning.GradientBoost.Test/Learners/ClassificationGradientBoostLearnerTest.cs +++ b/src/SharpLearning.GradientBoost.Test/Learners/ClassificationGradientBoostLearnerTest.cs @@ -107,6 +107,6 @@ public void ClassificationGradientBoostLearner_LearnWithEarlyStopping() var actual = evaluator.Error(split.TestSet.Targets, predictions); Assert.AreEqual(0.16279069767441862, actual, 0.000001); - Assert.AreEqual(90, model.Trees.First().ToArray().Length); + Assert.AreEqual(90, model.Trees[0].ToArray().Length); } } diff --git a/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs b/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs index abec21eb..52e0a508 100644 --- a/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs +++ 
b/src/SharpLearning.GradientBoost.Test/Models/ClassificationGradientBoostModelTest.cs @@ -79,7 +79,7 @@ public void ClassificationGradientBoostModel_PredictProbability_Single() Assert.AreEqual(0.038461538461538464, error, 0.0000001); - var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 }, }), new(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 }, }), new(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 }, }), new(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 }, }), new(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 }, }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new(1, new Dictionary { { 1, 0.987907185249206 }, { 0, 0.0120928147507945 }, }), new(1, new Dictionary { { 1, 0.982783250692275 }, { 0, 0.0172167493077254 }, }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489364 }, }), new(1, new Dictionary { { 1, 0.995341658753364 }, { 0, 0.00465834124663571 }, }), new(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 }, }), new(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 0.988136688452403 }, }), new(0, new Dictionary { { 1, 0.00048646805791186 }, { 0, 0.999513531942088 }, }), new(1, new Dictionary { { 1, 0.999891769651047 }, { 0, 0.000108230348952856 }, }), new(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 }, }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 0.988136688452403 }, }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 }, }), new(1, new Dictionary { { 1, 0.993419876193791 }, { 0, 0.00658012380620933 }, }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), 
new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 }, }), new(1, new Dictionary { { 1, 0.988568859753437 }, { 0, 0.0114311402465632 }, }), new(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 }, }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 } }), new(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 } }), new(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 } }), new(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 } }), new(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 } }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 } }), new(1, new Dictionary { { 1, 0.987907185249206 }, { 0, 0.0120928147507945 } }), new(1, new Dictionary { { 1, 0.982783250692275 }, { 0, 0.0172167493077254 } }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 } }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489364 } }), new(1, new Dictionary { { 1, 0.995341658753364 }, { 0, 0.00465834124663571 } }), new(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 } }), new(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 0.988136688452403 } }), new(0, new Dictionary { { 1, 0.00048646805791186 }, { 0, 0.999513531942088 } }), new(1, new Dictionary { { 1, 0.999891769651047 }, { 0, 0.000108230348952856 } }), new(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 } }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 } }), new(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 0.988136688452403 } }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 } }), new(1, new 
Dictionary { { 1, 0.993419876193791 }, { 0, 0.00658012380620933 } }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 } }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 } }), new(1, new Dictionary { { 1, 0.988568859753437 }, { 0, 0.0114311402465632 } }), new(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 } }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 } }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 } }) }; CollectionAssert.AreEqual(expected, actual); } @@ -99,7 +99,7 @@ public void ClassificationGradientBoostModel_PredictProbability_Multiple() Assert.AreEqual(0.038461538461538464, error, 0.0000001); - var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 }, }), new(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 }, }), new(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 }, }), new(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 }, }), new(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 }, }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new(1, new Dictionary { { 1, 0.987907185249206 }, { 0, 0.0120928147507945 }, }), new(1, new Dictionary { { 1, 0.982783250692275 }, { 0, 0.0172167493077254 }, }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489364 }, }), new(1, new Dictionary { { 1, 0.995341658753364 }, { 0, 0.00465834124663571 }, }), new(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 }, }), new(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 0.988136688452403 }, }), new(0, new Dictionary { { 1, 0.00048646805791186 }, { 0, 0.999513531942088 }, }), new(1, new Dictionary { { 1, 0.999891769651047 }, { 0, 
0.000108230348952856 }, }), new(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 }, }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 0.988136688452403 }, }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 }, }), new(1, new Dictionary { { 1, 0.993419876193791 }, { 0, 0.00658012380620933 }, }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 }, }), new(1, new Dictionary { { 1, 0.988568859753437 }, { 0, 0.0114311402465632 }, }), new(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 }, }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 }, }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 } }), new(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 } }), new(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 } }), new(0, new Dictionary { { 1, 0.00153419685769873 }, { 0, 0.998465803142301 } }), new(0, new Dictionary { { 1, 0.497135615200052 }, { 0, 0.502864384799948 } }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 } }), new(1, new Dictionary { { 1, 0.987907185249206 }, { 0, 0.0120928147507945 } }), new(1, new Dictionary { { 1, 0.982783250692275 }, { 0, 0.0172167493077254 } }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 } }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489364 } }), new(1, new Dictionary { { 1, 0.995341658753364 }, { 0, 0.00465834124663571 } }), new(0, new Dictionary { { 1, 0.00674291737944022 }, { 0, 0.99325708262056 } }), new(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 
0.988136688452403 } }), new(0, new Dictionary { { 1, 0.00048646805791186 }, { 0, 0.999513531942088 } }), new(1, new Dictionary { { 1, 0.999891769651047 }, { 0, 0.000108230348952856 } }), new(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 } }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 } }), new(0, new Dictionary { { 1, 0.0118633115475969 }, { 0, 0.988136688452403 } }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 } }), new(1, new Dictionary { { 1, 0.993419876193791 }, { 0, 0.00658012380620933 } }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 } }), new(1, new Dictionary { { 1, 0.996417847055106 }, { 0, 0.00358215294489362 } }), new(1, new Dictionary { { 1, 0.988568859753437 }, { 0, 0.0114311402465632 } }), new(0, new Dictionary { { 1, 0.00334655581934884 }, { 0, 0.996653444180651 } }), new(0, new Dictionary { { 1, 0.00428497228545111 }, { 0, 0.995715027714549 } }), new(0, new Dictionary { { 1, 0.00262490179961228 }, { 0, 0.997375098200388 } }) }; CollectionAssert.AreEqual(expected, actual); } @@ -109,7 +109,7 @@ public void ClassificationGradientBoostModel_GetVariableImportance() var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, - { "PreviousExperience_month", 1 } }; + { "PreviousExperience_month", 1 }, }; var learner = new ClassificationGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1, 0, new GradientBoostBinomialLoss(), false); @@ -118,7 +118,7 @@ public void ClassificationGradientBoostModel_GetVariableImportance() var actual = sut.GetVariableImportance(featureNameToIndex); var expected = new Dictionary { {"PreviousExperience_month", 100}, - {"AptitudeTestScore", 56.81853305612 } }; + {"AptitudeTestScore", 56.81853305612 }, }; Assert.AreEqual(expected.Count, actual.Count); var zip = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a }); diff --git 
a/src/SharpLearning.GradientBoost.Test/Models/RegressionGradientBoostModelTest.cs b/src/SharpLearning.GradientBoost.Test/Models/RegressionGradientBoostModelTest.cs index 3221d1de..0d947c22 100644 --- a/src/SharpLearning.GradientBoost.Test/Models/RegressionGradientBoostModelTest.cs +++ b/src/SharpLearning.GradientBoost.Test/Models/RegressionGradientBoostModelTest.cs @@ -61,7 +61,7 @@ public void RegressionGradientBoostModel_GetVariableImportance() var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, - { "PreviousExperience_month", 1 } }; + { "PreviousExperience_month", 1 }, }; var learner = new RegressionGradientBoostLearner(100, 0.1, 3, 1, 1e-6, 1.0, 0, new GradientBoostSquaredLoss(), false); @@ -70,7 +70,7 @@ public void RegressionGradientBoostModel_GetVariableImportance() var actual = sut.GetVariableImportance(featureNameToIndex); var expected = new Dictionary { { "PreviousExperience_month", 100.0 }, - { "AptitudeTestScore", 72.1682473281495 } }; + { "AptitudeTestScore", 72.1682473281495 }, }; Assert.AreEqual(expected.Count, actual.Count); var zip = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a }); diff --git a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs index f2f376df..214ef3ed 100644 --- a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs +++ b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMDecisionTreeLearner.cs @@ -12,7 +12,7 @@ namespace SharpLearning.GradientBoost.GBMDecisionTree; /// -/// +/// /// public sealed class GBMDecisionTreeLearner { @@ -45,7 +45,7 @@ public GBMDecisionTreeLearner( if (minimumInformationGain <= 0) { throw new ArgumentException("minimum information gain must be larger than 0"); } if (minimumSplitSize <= 0) { throw new ArgumentException("minimum split size must be larger than 0"); } if 
(featuresPrSplit < 0) { throw new ArgumentException("featuresPrSplit must be at least 0"); } - m_loss = loss ?? throw new ArgumentNullException("loss"); + m_loss = loss ?? throw new ArgumentNullException(nameof(loss)); m_maximumTreeDepth = maximumTreeDepth; m_minimumSplitSize = minimumSplitSize; @@ -107,7 +107,7 @@ public GBMTree Learn( RightError = rootValues.Cost, LeftConstant = bestConstant, RightConstant = bestConstant, - SampleCount = rootValues.Samples + SampleCount = rootValues.Samples, }; var nodes = new List { root }; @@ -147,8 +147,7 @@ public GBMTree Learn( SplitValue = -1, Cost = double.MaxValue, LeftConstant = -1, - RightConstant = -1 - + RightConstant = -1, }; if (allFeatureIndices.Length != featuresPrSplit.Length) @@ -163,7 +162,6 @@ public GBMTree Learn( { FindBestSplit(observations, residuals, targets, predictions, orderedElements, parentItem, parentInSample, i, splitResults); - } } else // multi-threaded search for best split @@ -192,10 +190,10 @@ void FindSplit() => SplitWorker( { BestSplit = initBestSplit, Left = GBMSplitInfo.NewEmpty(), - Right = GBMSplitInfo.NewEmpty() + Right = GBMSplitInfo.NewEmpty(), }; - if (splitResults.Count != 0) + if (!splitResults.IsEmpty) { // alternative to for finding bestsplit. gives slightly different results. probably due to order. 
//GBMSplitResult result; @@ -249,7 +247,7 @@ void FindSplit() => SplitWorker( Values = bestSplitResult.Left.Copy(NodePositionType.Left), InSample = leftInSample, Depth = depth, - Parent = node + Parent = node, }); queue.Enqueue(new GBMTreeCreationItem @@ -257,7 +255,7 @@ void FindSplit() => SplitWorker( Values = bestSplitResult.Right.Copy(NodePositionType.Right), InSample = rightInSample, Depth = depth, - Parent = node + Parent = node, }); } else @@ -268,7 +266,6 @@ void FindSplit() => SplitWorker( var rightInSample = new bool[parentInSample.Length]; var featureIndices = orderedElements[bestSplitResult.BestSplit.FeatureIndex]; - for (var i = 0; i < parentInSample.Length; i++) { if (i < bestSplitResult.BestSplit.SplitIndex) @@ -296,7 +293,9 @@ void FindSplit() => SplitWorker( static void EmpytySplitResults(ConcurrentBag splitResults) { - while (splitResults.TryTake(out GBMSplitResult result)) ; + while (splitResults.TryTake(out GBMSplitResult result)) + { + } } void SplitWorker(F64Matrix observations, @@ -336,7 +335,7 @@ void FindBestSplit(F64Matrix observations, Cost = double.MaxValue, LeftConstant = -1, RightConstant = -1, - SampleCount = parentItem.Values.Samples + SampleCount = parentItem.Values.Samples, }; var bestLeft = GBMSplitInfo.NewEmpty(); @@ -401,7 +400,6 @@ void FindBestSplit(F64Matrix observations, static int NextAllowedIndex(int start, int[] orderedIndexes, bool[] inSample) { - for (var i = start; i < orderedIndexes.Length; i++) { if (inSample[orderedIndexes[i]]) @@ -409,7 +407,7 @@ static int NextAllowedIndex(int start, int[] orderedIndexes, bool[] inSample) return i; } } - return (orderedIndexes.Length + 1); + return orderedIndexes.Length + 1; } static void SetParentLeafIndex(int nodeIndex, GBMTreeCreationItem parentItem) diff --git a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMNode.cs b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMNode.cs index 58a5191b..1ca7dd4b 100644 --- a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMNode.cs 
+++ b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMNode.cs @@ -2,7 +2,6 @@ namespace SharpLearning.GradientBoost.GBMDecisionTree; - /// /// Decision tree node for Gradient boost decision tree /// diff --git a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplit.cs b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplit.cs index b6c5d945..e7442811 100644 --- a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplit.cs +++ b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplit.cs @@ -76,7 +76,7 @@ public GBMNode GetNode() LeftConstant = LeftConstant, RightConstant = RightConstant, Depth = Depth, - SampleCount = SampleCount + SampleCount = SampleCount, }; } } diff --git a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitInfo.cs b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitInfo.cs index 1142ecee..b288b767 100644 --- a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitInfo.cs +++ b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitInfo.cs @@ -55,7 +55,7 @@ public static GBMSplitInfo NewEmpty() SumOfSquares = 0, Cost = 0, BestConstant = 0, - BinomialSum = 0 + BinomialSum = 0, }; } @@ -71,9 +71,9 @@ public GBMSplitInfo Copy() /// /// Creates a copy of the split info /// - /// + /// /// - public GBMSplitInfo Copy(NodePositionType Position) + public GBMSplitInfo Copy(NodePositionType position) { return new GBMSplitInfo { @@ -82,8 +82,8 @@ public GBMSplitInfo Copy(NodePositionType Position) SumOfSquares = SumOfSquares, Cost = Cost, BestConstant = BestConstant, - Position = Position, - BinomialSum = BinomialSum + Position = position, + BinomialSum = BinomialSum, }; } } diff --git a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitResult.cs b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitResult.cs index 00f0aaa0..e1f7de02 100644 --- a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitResult.cs +++ b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMSplitResult.cs @@ -1,7 +1,7 @@ namespace 
SharpLearning.GradientBoost.GBMDecisionTree; /// -/// Split Results. Contains the best split +/// Split Results. Contains the best split /// and the left and right split information /// public struct GBMSplitResult diff --git a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMTree.cs b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMTree.cs index 2f5e3599..8d2904d4 100644 --- a/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMTree.cs +++ b/src/SharpLearning.GradientBoost/GBMDecisionTree/GBMTree.cs @@ -13,7 +13,7 @@ namespace SharpLearning.GradientBoost.GBMDecisionTree; public class GBMTree { /// - /// + /// /// public readonly List Nodes; @@ -72,14 +72,7 @@ public double Predict(double[] observation) var leaf = Predict(Nodes[1], 1, observation); - if (observation[leaf.FeatureIndex] < leaf.SplitValue) - { - return leaf.LeftConstant; - } - else - { - return leaf.RightConstant; - } + return observation[leaf.FeatureIndex] < leaf.SplitValue ? leaf.LeftConstant : leaf.RightConstant; } /// diff --git a/src/SharpLearning.GradientBoost/Learners/ClassificationBinomialGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/ClassificationBinomialGradientBoostLearner.cs index 230d9ae3..99d15dba 100644 --- a/src/SharpLearning.GradientBoost/Learners/ClassificationBinomialGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/ClassificationBinomialGradientBoostLearner.cs @@ -3,7 +3,7 @@ namespace SharpLearning.GradientBoost.Learners; /// -/// Classification gradient boost learner based on +/// Classification gradient boost learner based on /// http://statweb.stanford.edu/~jhf/ftp/trebst.pdf /// A series of regression trees are fitted stage wise on the residuals of the previous stage. /// The resulting models are ensembled together using addition. 
Implementation based on: @@ -12,7 +12,7 @@ namespace SharpLearning.GradientBoost.Learners; public class ClassificationBinomialGradientBoostLearner : ClassificationGradientBoostLearner { /// - /// Binomial deviance classification gradient boost learner. + /// Binomial deviance classification gradient boost learner. /// A series of regression trees are fitted stage wise on the residuals of the previous stage /// If multi-class problem, then one-vs-all method is used. /// @@ -21,8 +21,8 @@ public class ClassificationBinomialGradientBoostLearner : ClassificationGradient /// The maximum depth of the tree models /// minimum node split size in the trees 1 is default /// The minimum improvement in information gain before a split is made - /// ratio of observations sampled at each iteration. Default is 1.0. - /// If below 1.0 the algorithm changes to stochastic gradient boosting. + /// ratio of observations sampled at each iteration. Default is 1.0. + /// If below 1.0 the algorithm changes to stochastic gradient boosting. /// This reduces variance in the ensemble and can help counter overfitting /// Number of features used at each split in the tree. 0 means all will be used /// Use multi threading to speed up execution (default is true) diff --git a/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs index f4989029..852cb130 100644 --- a/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/ClassificationGradientBoostLearner.cs @@ -12,7 +12,7 @@ namespace SharpLearning.GradientBoost.Learners; /// -/// Classification gradient boost learner based on +/// Classification gradient boost learner based on /// http://statweb.stanford.edu/~jhf/ftp/trebst.pdf /// A series of regression trees are fitted stage wise on the residuals of the previous stage. 
/// The resulting models are ensembled together using addition. Implementation based on: @@ -32,7 +32,7 @@ public class ClassificationGradientBoostLearner readonly IGradientBoostLoss m_loss; /// - /// Base classification gradient boost learner. + /// Base classification gradient boost learner. /// A series of regression trees are fitted stage wise on the residuals of the previous stage /// /// The number of iterations or stages @@ -40,8 +40,8 @@ public class ClassificationGradientBoostLearner /// The maximum depth of the tree models /// minimum node split size in the trees 1 is default /// The minimum improvement in information gain before a split is made - /// ratio of observations sampled at each iteration. Default is 1.0. - /// If below 1.0 the algorithm changes to stochastic gradient boosting. + /// ratio of observations sampled at each iteration. Default is 1.0. + /// If below 1.0 the algorithm changes to stochastic gradient boosting. /// This reduces variance in the ensemble and can help outer overfitting /// Number of features used at each split in the tree. 0 means all will be used /// loss function used @@ -74,7 +74,7 @@ public ClassificationGradientBoostLearner( } /// - /// Base classification gradient boost learner. + /// Base classification gradient boost learner. /// A series of regression trees are fitted stage wise on the residuals of the previous stage /// /// The number of iterations or stages @@ -82,8 +82,8 @@ public ClassificationGradientBoostLearner( /// The maximum depth of the tree models /// minimum node split size in the trees 1 is default /// The minimum improvement in information gain before a split is made - /// ratio of observations sampled at each iteration. Default is 1.0. - /// If below 1.0 the algorithm changes to stochastic gradient boosting. + /// ratio of observations sampled at each iteration. Default is 1.0. + /// If below 1.0 the algorithm changes to stochastic gradient boosting. 
/// This reduces variance in the ensemble and can help counter overfitting /// Number of features used at each split in the tree. 0 means all will be used public ClassificationGradientBoostLearner( @@ -148,7 +148,6 @@ public ClassificationGradientBoostModel Learn(F64Matrix observations, double[] t oneVsAllTargets = new double[1][]; var target = uniqueTargets[0]; oneVsAllTargets[0] = targets.Select(t => t == target ? 1.0 : 0.0).ToArray(); - } else // multi-class case - use oneVsAll strategy and fit probability for each class { @@ -259,7 +258,6 @@ public ClassificationGradientBoostModel LearnWithEarlyStopping( oneVsAllTargets = new double[1][]; var target = uniqueTargets[0]; oneVsAllTargets[0] = trainingTargets.Select(t => t == target ? 1.0 : 0.0).ToArray(); - } else // multi-class case - use oneVsAll strategy and fit probability for each class { @@ -391,7 +389,6 @@ public ClassificationGradientBoostModel LearnWithEarlyStopping( oneVsAllTargets = new double[1][]; var target = uniqueTargets[0]; oneVsAllTargets[0] = trainingTargets.Select(t => t == target ? 1.0 : 0.0).ToArray(); - } else // multi-class case - use oneVsAll strategy and fit probability for each class { diff --git a/src/SharpLearning.GradientBoost/Learners/RegressionAbsoluteLossGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/RegressionAbsoluteLossGradientBoostLearner.cs index f487baf5..ee0a6153 100644 --- a/src/SharpLearning.GradientBoost/Learners/RegressionAbsoluteLossGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/RegressionAbsoluteLossGradientBoostLearner.cs @@ -4,7 +4,7 @@ namespace SharpLearning.GradientBoost.Learners; /// /// -/// Regression gradient boost learner based on +/// Regression gradient boost learner based on /// http://statweb.stanford.edu/~jhf/ftp/trebst.pdf /// A series of regression trees are fitted stage wise on the residuals of the previous stage. /// The resulting models are ensembled together using addition. 
Implementation based on: @@ -22,10 +22,10 @@ public class RegressionAbsoluteLossGradientBoostLearner : RegressionGradientBoos /// The maximum depth of the tree models /// minimum node split size in the trees 1 is default /// The minimum improvement in information gain before a split is made - /// ratio of observations sampled at each iteration. Default is 1.0. - /// If below 1.0 the algorithm changes to stochastic gradient boosting. + /// ratio of observations sampled at each iteration. Default is 1.0. + /// If below 1.0 the algorithm changes to stochastic gradient boosting. /// This reduces variance in the ensemble and can help outer overfitting - /// Number of features used at each split in the tree. 0 means all will be used + /// Number of features used at each split in the tree. 0 means all will be used /// Use multi threading to speed up execution (default is true) public RegressionAbsoluteLossGradientBoostLearner( int iterations = 100, diff --git a/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs index bac8a1bb..15483efb 100644 --- a/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/RegressionGradientBoostLearner.cs @@ -13,7 +13,7 @@ namespace SharpLearning.GradientBoost.Learners; /// /// -/// Regression gradient boost learner based on +/// Regression gradient boost learner based on /// http://statweb.stanford.edu/~jhf/ftp/trebst.pdf /// A series of regression trees are fitted stage wise on the residuals of the previous stage. /// The resulting models are ensembled together using addition. Implementation based on: @@ -30,7 +30,7 @@ public class RegressionGradientBoostLearner : IIndexedLearner, ILearner< readonly IGradientBoostLoss m_loss; /// - /// Base regression gradient boost learner. + /// Base regression gradient boost learner. 
/// A series of regression trees are fitted stage wise on the residuals of the previous stage /// /// The number of iterations or stages @@ -38,8 +38,8 @@ public class RegressionGradientBoostLearner : IIndexedLearner, ILearner< /// The maximum depth of the tree models /// minimum node split size in the trees 1 is default /// The minimum improvement in information gain before a split is made - /// ratio of observations sampled at each iteration. Default is 1.0. - /// If below 1.0 the algorithm changes to stochastic gradient boosting. + /// ratio of observations sampled at each iteration. Default is 1.0. + /// If below 1.0 the algorithm changes to stochastic gradient boosting. /// This reduces variance in the ensemble and can help outer overfitting /// Number of features used at each split in the tree. 0 means all will be used /// loss function used @@ -72,7 +72,7 @@ public RegressionGradientBoostLearner( } /// - /// Base regression gradient boost learner. + /// Base regression gradient boost learner. /// A series of regression trees are fitted stage wise on the residuals of the previous stage /// /// The number of iterations or stages @@ -80,8 +80,8 @@ public RegressionGradientBoostLearner( /// The maximum depth of the tree models /// minimum node split size in the trees 1 is default /// The minimum improvement in information gain before a split is made - /// ratio of observations sampled at each iteration. Default is 1.0. - /// If below 1.0 the algorithm changes to stochastic gradient boosting. + /// ratio of observations sampled at each iteration. Default is 1.0. + /// If below 1.0 the algorithm changes to stochastic gradient boosting. /// This reduces variance in the ensemble and can help outer overfitting /// Number of features used at each split in the tree. 
0 means all will be used public RegressionGradientBoostLearner( @@ -148,7 +148,6 @@ public RegressionGradientBoostModel Learn(F64Matrix observations, double[] targe trees[iteration] = m_learner.Learn(observations, targets, residuals, predictions, orderedElements, currentInSample); - } else { @@ -230,7 +229,6 @@ public RegressionGradientBoostModel LearnWithEarlyStopping( trees[iteration] = m_learner.Learn(trainingObservations, trainingTargets, residuals, predictions, orderedElements, currentInSample); - } else { diff --git a/src/SharpLearning.GradientBoost/Learners/RegressionHuberLossGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/RegressionHuberLossGradientBoostLearner.cs index 45bc583c..7f7bcaf1 100644 --- a/src/SharpLearning.GradientBoost/Learners/RegressionHuberLossGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/RegressionHuberLossGradientBoostLearner.cs @@ -4,7 +4,7 @@ namespace SharpLearning.GradientBoost.Learners; /// /// -/// Regression gradient boost learner based on +/// Regression gradient boost learner based on /// http://statweb.stanford.edu/~jhf/ftp/trebst.pdf /// A series of regression trees are fitted stage wise on the residuals of the previous stage. /// The resulting models are ensembled together using addition. Implementation based on: @@ -22,10 +22,10 @@ public class RegressionHuberLossGradientBoostLearner : RegressionGradientBoostLe /// The maximum depth of the tree models /// minimum node split size in the trees 1 is default /// The minimum improvement in information gain before a split is made - /// ratio of observations sampled at each iteration. Default is 1.0. - /// If below 1.0 the algorithm changes to stochastic gradient boosting. + /// ratio of observations sampled at each iteration. Default is 1.0. + /// If below 1.0 the algorithm changes to stochastic gradient boosting. 
/// This reduces variance in the ensemble and can help counter overfitting - /// Number of features used at each split in the tree. 0 means all will be used + /// Number of features used at each split in the tree. 0 means all will be used /// The quantile used for deciding when to switch between square and absolute loss /// Use multi threading to speed up execution (default is true) public RegressionHuberLossGradientBoostLearner( diff --git a/src/SharpLearning.GradientBoost/Learners/RegressionQuantileLossGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/RegressionQuantileLossGradientBoostLearner.cs index 11cdbe08..d4352e23 100644 --- a/src/SharpLearning.GradientBoost/Learners/RegressionQuantileLossGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/RegressionQuantileLossGradientBoostLearner.cs @@ -4,7 +4,7 @@ namespace SharpLearning.GradientBoost.Learners; /// /// -/// Regression gradient boost learner based on +/// Regression gradient boost learner based on /// http://statweb.stanford.edu/~jhf/ftp/trebst.pdf /// A series of regression trees are fitted stage wise on the residuals of the previous stage. /// The resulting models are ensembled together using addition. Implementation based on: @@ -22,11 +22,11 @@ public class RegressionQuantileLossGradientBoostLearner : RegressionGradientBoos /// The maximum depth of the tree models /// minimum node split size in the trees 1 is default /// The minimum improvement in information gain before a split is made - /// ratio of observations sampled at each iteration. Default is 1.0. - /// If below 1.0 the algorithm changes to stochastic gradient boosting. + /// ratio of observations sampled at each iteration. Default is 1.0. + /// If below 1.0 the algorithm changes to stochastic gradient boosting. /// This reduces variance in the ensemble and can help counter overfitting - /// Number of features used at each split in the tree. 
0 means all will be used - /// The quantile used in quantile regression. + /// Number of features used at each split in the tree. 0 means all will be used + /// The quantile used in quantile regression. /// 0.5 is the median and corresponds to absolute loss or LAD regression /// Use multi threading to speed up execution (default is true) public RegressionQuantileLossGradientBoostLearner( diff --git a/src/SharpLearning.GradientBoost/Learners/RegressionSquareLossGradientBoostLearner.cs b/src/SharpLearning.GradientBoost/Learners/RegressionSquareLossGradientBoostLearner.cs index 83549e79..ffdd5d54 100644 --- a/src/SharpLearning.GradientBoost/Learners/RegressionSquareLossGradientBoostLearner.cs +++ b/src/SharpLearning.GradientBoost/Learners/RegressionSquareLossGradientBoostLearner.cs @@ -4,7 +4,7 @@ namespace SharpLearning.GradientBoost.Learners; /// /// -/// Regression gradient boost learner based on +/// Regression gradient boost learner based on /// http://statweb.stanford.edu/~jhf/ftp/trebst.pdf /// A series of regression trees are fitted stage wise on the residuals of the previous stage. /// The resulting models are ensembled together using addition. Implementation based on: @@ -14,7 +14,7 @@ namespace SharpLearning.GradientBoost.Learners; public class RegressionSquareLossGradientBoostLearner : RegressionGradientBoostLearner { /// - /// Square loss/Least squares (LS) regression gradient boost learner. + /// Square loss/Least squares (LS) regression gradient boost learner. /// A series of regression trees are fitted stage wise on the residuals of the previous stage /// /// The number of iterations or stages @@ -22,10 +22,10 @@ public class RegressionSquareLossGradientBoostLearner : RegressionGradientBoostL /// The maximum depth of the tree models /// minimum node split size in the trees 1 is default /// The minimum improvement in information gain before a split is made - /// ratio of observations sampled at each iteration. Default is 1.0. 
- /// If below 1.0 the algorithm changes to stochastic gradient boosting. + /// ratio of observations sampled at each iteration. Default is 1.0. + /// If below 1.0 the algorithm changes to stochastic gradient boosting. /// This reduces variance in the ensemble and can help outer overfitting - /// Number of features used at each split in the tree. 0 means all will be used + /// Number of features used at each split in the tree. 0 means all will be used /// Use multi threading to speed up execution (default is true) public RegressionSquareLossGradientBoostLearner( int iterations = 100, diff --git a/src/SharpLearning.GradientBoost/Loss/GradientBoostAbsoluteLoss.cs b/src/SharpLearning.GradientBoost/Loss/GradientBoostAbsoluteLoss.cs index 17253730..497691fd 100644 --- a/src/SharpLearning.GradientBoost/Loss/GradientBoostAbsoluteLoss.cs +++ b/src/SharpLearning.GradientBoost/Loss/GradientBoostAbsoluteLoss.cs @@ -5,15 +5,15 @@ namespace SharpLearning.GradientBoost.Loss; /// -/// Least absolute deviation (LAD) loss function. LAD gives equal emphasis to all observations. +/// Least absolute deviation (LAD) loss function. LAD gives equal emphasis to all observations. /// This makes LAD robust against outliers. /// http://en.wikipedia.org/wiki/Least_absolute_deviations /// public sealed class GradientBoostAbsoluteLoss : IGradientBoostLoss { /// - /// Least absolute deviation (LAD) loss function. LAD gives equal emphasis to all observations. - /// This makes LAD robust against outliers. LAD regression is also sometimes known as robust regression. + /// Least absolute deviation (LAD) loss function. LAD gives equal emphasis to all observations. + /// This makes LAD robust against outliers. LAD regression is also sometimes known as robust regression. 
/// http://en.wikipedia.org/wiki/Least_absolute_deviations /// public GradientBoostAbsoluteLoss() @@ -40,13 +40,6 @@ public double InitialLoss(double[] targets, bool[] inSample) return values.ToArray().Median(); } - /// - /// - /// - /// - /// - /// - /// public GBMSplitInfo InitSplit(double[] targets, double[] residuals, bool[] inSample) { var splitInfo = GBMSplitInfo.NewEmpty(); @@ -78,23 +71,9 @@ public GBMSplitInfo InitSplit(double[] targets, double[] residuals, bool[] inSam public double NegativeGradient(double target, double prediction) { var value = target - prediction; - if (value > 0.0) - { - return 1.0; - } - else - { - return -1.0; - } + return value > 0.0 ? 1.0 : -1.0; } - /// - /// - /// - /// - /// - /// - /// public void UpdateResiduals(double[] targets, double[] predictions, double[] residuals, bool[] inSample) { @@ -107,13 +86,6 @@ public void UpdateResiduals(double[] targets, double[] predictions, } } - /// - /// - /// - /// - /// - /// - /// public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, double target, double residual) { @@ -154,10 +126,6 @@ public double UpdatedLeafValue(double currentLeafValue, double[] targets, return values.ToArray().Median(); } - /// - /// - /// - /// public bool UpdateLeafValues() { return true; diff --git a/src/SharpLearning.GradientBoost/Loss/GradientBoostBinomialLoss.cs b/src/SharpLearning.GradientBoost/Loss/GradientBoostBinomialLoss.cs index a6a6a0de..e7a1eb70 100644 --- a/src/SharpLearning.GradientBoost/Loss/GradientBoostBinomialLoss.cs +++ b/src/SharpLearning.GradientBoost/Loss/GradientBoostBinomialLoss.cs @@ -43,7 +43,7 @@ public double InitialLoss(double[] targets, bool[] inSample) } /// - /// + /// /// /// /// @@ -76,7 +76,7 @@ public GBMSplitInfo InitSplit(double[] targets, double[] residuals, bool[] inSam } /// - /// + /// /// /// /// @@ -87,7 +87,7 @@ public double NegativeGradient(double target, double prediction) } /// - /// + /// /// /// /// @@ -106,7 +106,7 @@ public void 
UpdateResiduals(double[] targets, double[] predictions, } /// - /// + /// /// /// /// @@ -135,14 +135,7 @@ public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, static double BinomialBestConstant(double sum, double binomialSum) { - if (binomialSum != 0.0) - { - return sum / binomialSum; - } - else - { - return 0.0; - } + return binomialSum != 0.0 ? sum / binomialSum : 0.0; } /// diff --git a/src/SharpLearning.GradientBoost/Loss/GradientBoostHuberLoss.cs b/src/SharpLearning.GradientBoost/Loss/GradientBoostHuberLoss.cs index 7fc296c4..34fe8ec2 100644 --- a/src/SharpLearning.GradientBoost/Loss/GradientBoostHuberLoss.cs +++ b/src/SharpLearning.GradientBoost/Loss/GradientBoostHuberLoss.cs @@ -7,9 +7,9 @@ namespace SharpLearning.GradientBoost.Loss; /// -/// Huber loss is a combination of Squared loss and least absolute deviation (LAD). -/// For small residuals (below quantile defined by alpha) squared loss is used. -/// For large residuals (above quantile defined by alpha) LAD loss is used. +/// Huber loss is a combination of Squared loss and least absolute deviation (LAD). +/// For small residuals (below quantile defined by alpha) squared loss is used. +/// For large residuals (above quantile defined by alpha) LAD loss is used. /// This makes Huber loss robust against outliers while still having much of the sensitivity of squared loss. /// http://en.wikipedia.org/wiki/Huber_loss /// @@ -18,9 +18,9 @@ public sealed class GradientBoostHuberLoss : IGradientBoostLoss double m_gamma; readonly double m_alpha; - /// Huber loss is a combination of Squared loss and least absolute deviation (LAD). - /// For small residuals (below quantile defined by alpha) squared loss is used. - /// For large residuals (above quantile defined by alpha) LAD loss is used. + /// Huber loss is a combination of Squared loss and least absolute deviation (LAD). + /// For small residuals (below quantile defined by alpha) squared loss is used. 
+ /// For large residuals (above quantile defined by alpha) LAD loss is used. /// This makes Huber loss robust against outliers while still having much of the sensitivity of squared loss. /// http://en.wikipedia.org/wiki/Huber_loss public GradientBoostHuberLoss(double alpha = 0.9) @@ -50,7 +50,7 @@ public double InitialLoss(double[] targets, bool[] inSample) } /// - /// + /// /// /// /// @@ -90,7 +90,7 @@ public double NegativeGradient(double target, double prediction) } /// - /// + /// /// /// /// @@ -135,7 +135,7 @@ public void UpdateResiduals(double[] targets, double[] predictions, } /// - /// + /// /// /// /// @@ -158,7 +158,7 @@ public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, } /// - /// + /// /// /// public bool UpdateLeafValues() @@ -167,7 +167,7 @@ public bool UpdateLeafValues() } /// - /// + /// /// /// /// diff --git a/src/SharpLearning.GradientBoost/Loss/GradientBoostQuantileLoss.cs b/src/SharpLearning.GradientBoost/Loss/GradientBoostQuantileLoss.cs index f949e365..03c2fca0 100644 --- a/src/SharpLearning.GradientBoost/Loss/GradientBoostQuantileLoss.cs +++ b/src/SharpLearning.GradientBoost/Loss/GradientBoostQuantileLoss.cs @@ -6,8 +6,8 @@ namespace SharpLearning.GradientBoost.Loss; /// -/// Quantile loss. Whereas the method of least squares results in estimates that approximate the conditional mean of the response variable -/// given certain values of the predictor variables, quantile regression aims at estimating either the conditional median +/// Quantile loss. Whereas the method of least squares results in estimates that approximate the conditional mean of the response variable +/// given certain values of the predictor variables, quantile regression aims at estimating either the conditional median /// or other quantiles of the response variable. Using the median results in Least absolute deviation or LAD loss. 
/// public sealed class GradientBoostQuantileLoss : IGradientBoostLoss @@ -15,8 +15,8 @@ public sealed class GradientBoostQuantileLoss : IGradientBoostLoss readonly double m_alpha; /// - /// Quantile loss. Whereas the method of least squares results in estimates that approximate the conditional mean of the response variable - /// given certain values of the predictor variables, quantile regression aims at estimating either the conditional median + /// Quantile loss. Whereas the method of least squares results in estimates that approximate the conditional mean of the response variable + /// given certain values of the predictor variables, quantile regression aims at estimating either the conditional median /// or other quantiles of the response variable. Using the median results in Least absolute deviation or LAD loss. /// /// @@ -47,7 +47,7 @@ public double InitialLoss(double[] targets, bool[] inSample) } /// - /// + /// /// /// /// @@ -83,19 +83,11 @@ public GBMSplitInfo InitSplit(double[] targets, double[] residuals, bool[] inSam /// public double NegativeGradient(double target, double prediction) { - if (target > prediction) - { - return m_alpha; - } - else - { - return -(1.0 - m_alpha); - } - + return target > prediction ? 
m_alpha : -(1.0 - m_alpha); } /// - /// + /// /// /// /// @@ -114,7 +106,7 @@ public void UpdateResiduals(double[] targets, double[] predictions, } /// - /// + /// /// /// /// @@ -137,7 +129,7 @@ public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, } /// - /// + /// /// /// public bool UpdateLeafValues() diff --git a/src/SharpLearning.GradientBoost/Loss/GradientBoostSquaredLoss.cs b/src/SharpLearning.GradientBoost/Loss/GradientBoostSquaredLoss.cs index 036762a8..249ad3cc 100644 --- a/src/SharpLearning.GradientBoost/Loss/GradientBoostSquaredLoss.cs +++ b/src/SharpLearning.GradientBoost/Loss/GradientBoostSquaredLoss.cs @@ -16,11 +16,10 @@ public sealed class GradientBoostSquaredLoss : IGradientBoostLoss /// public GradientBoostSquaredLoss() { - } /// - /// Initial loss is the mean of the targets + /// Initial loss is the mean of the targets /// /// /// @@ -42,7 +41,7 @@ public double InitialLoss(double[] targets, bool[] inSample) } /// - /// + /// /// /// /// @@ -83,7 +82,7 @@ public double NegativeGradient(double target, double prediction) } /// - /// + /// /// /// /// @@ -102,7 +101,7 @@ public void UpdateResiduals(double[] targets, double[] predictions, } /// - /// + /// /// /// /// @@ -127,7 +126,6 @@ public void UpdateSplitConstants(ref GBMSplitInfo left, ref GBMSplitInfo right, //left.Cost = left.SumOfSquares - (left.Sum * left.Sum * leftSamplesInv); //left.BestConstant = left.Sum * leftSamplesInv; - right.Samples--; right.Sum -= residual; right.SumOfSquares -= residual2; diff --git a/src/SharpLearning.GradientBoost/Loss/IGradientBoostLoss.cs b/src/SharpLearning.GradientBoost/Loss/IGradientBoostLoss.cs index 202f57dd..90c685c9 100644 --- a/src/SharpLearning.GradientBoost/Loss/IGradientBoostLoss.cs +++ b/src/SharpLearning.GradientBoost/Loss/IGradientBoostLoss.cs @@ -8,7 +8,7 @@ namespace SharpLearning.GradientBoost.Loss; public interface IGradientBoostLoss { /// - /// Calculate the initial, constant, loss based on the targets and the 
samples used + /// Calculate the initial, constant, loss based on the targets and the samples used /// /// /// diff --git a/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs b/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs index 1f0ea628..3d463bcf 100644 --- a/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs +++ b/src/SharpLearning.GradientBoost/Models/ClassificationGradientBoostModel.cs @@ -11,7 +11,7 @@ namespace SharpLearning.GradientBoost.Models; /// -/// +/// /// [Serializable] public sealed class ClassificationGradientBoostModel @@ -19,32 +19,32 @@ public sealed class ClassificationGradientBoostModel , IPredictorModel { /// - /// + /// /// public readonly GBMTree[][] Trees; /// - /// + /// /// public readonly double LearningRate; /// - /// + /// /// public readonly double InitialLoss; /// - /// + /// /// public readonly double[] TargetNames; /// - /// + /// /// public readonly int FeatureCount; /// - /// + /// /// /// /// @@ -73,14 +73,7 @@ public ClassificationGradientBoostModel( /// public double Predict(double[] observation) { - if (TargetNames.Length == 2) - { - return BinaryPredict(observation); - } - else - { - return MultiClassPredict(observation); - } + return TargetNames.Length == 2 ? BinaryPredict(observation) : MultiClassPredict(observation); } /// @@ -90,14 +83,7 @@ public double Predict(double[] observation) /// public ProbabilityPrediction PredictProbability(double[] observation) { - if (TargetNames.Length == 2) - { - return BinaryProbabilityPredict(observation); - } - else - { - return MultiClassProbabilityPredict(observation); - } + return TargetNames.Length == 2 ? 
BinaryProbabilityPredict(observation) : MultiClassProbabilityPredict(observation); } /// diff --git a/src/SharpLearning.GradientBoost/Models/RegressionGradientBoostModel.cs b/src/SharpLearning.GradientBoost/Models/RegressionGradientBoostModel.cs index d7d6d4c9..538ed972 100644 --- a/src/SharpLearning.GradientBoost/Models/RegressionGradientBoostModel.cs +++ b/src/SharpLearning.GradientBoost/Models/RegressionGradientBoostModel.cs @@ -10,33 +10,33 @@ namespace SharpLearning.GradientBoost.Models; /// -/// +/// /// [Serializable] public sealed class RegressionGradientBoostModel : IPredictorModel { /// - /// + /// /// public readonly GBMTree[] Trees; /// - /// + /// /// public readonly double LearningRate; /// - /// + /// /// public readonly double InitialLoss; /// - /// + /// /// public readonly int FeatureCount; /// - /// + /// /// /// /// @@ -91,10 +91,10 @@ public double[] Predict(F64Matrix observations) /// public Dictionary GetVariableImportance(Dictionary featureNameToIndex) { - var m_rawVariableImportance = GetRawVariableImportance(); - var max = m_rawVariableImportance.Max(); + var rawVariableImportance = GetRawVariableImportance(); + var max = rawVariableImportance.Max(); - var scaledVariableImportance = m_rawVariableImportance + var scaledVariableImportance = rawVariableImportance .Select(v => (v / max) * 100.0) .ToArray(); diff --git a/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs b/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs index 57f9d5a7..dff50afd 100644 --- a/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs +++ b/src/SharpLearning.InputOutput.Test/Csv/CsvParserTest.cs @@ -121,7 +121,7 @@ static List Expected_NoHeader() { new(columnNameToIndex, ["1", "15", "0"]), new(columnNameToIndex, ["1", "12", "0"]), - new(columnNameToIndex, ["4", "6", "0"]) + new(columnNameToIndex, ["4", "6", "0"]), }; return expected; @@ -133,14 +133,14 @@ static List Expected() { { "AptitudeTestScore", 0 }, { "PreviousExperience_month", 1 }, - { "Pass", 
2 } + { "Pass", 2 }, }; var expected = new List { new(columnNameToIndex, ["5", "2", "1"]), new(columnNameToIndex, ["1", "12", "0"]), - new(columnNameToIndex, ["3", "18", "0"]) + new(columnNameToIndex, ["3", "18", "0"]), }; return expected; @@ -151,14 +151,14 @@ static List Expected_ColumnNames() var columnNameToIndex = new Dictionary { { "PreviousExperience_month", 0 }, - { "Pass", 1 } + { "Pass", 1 }, }; var expected = new List { new(columnNameToIndex, ["2", "1"]), new(columnNameToIndex, ["12", "0"]), - new(columnNameToIndex, ["18", "0"]) + new(columnNameToIndex, ["18", "0"]), }; return expected; @@ -172,7 +172,7 @@ static List Expected_Select_ColumnNames() { new(columnNameToIndex, ["1"]), new(columnNameToIndex, ["0"]), - new(columnNameToIndex, ["0"]) + new(columnNameToIndex, ["0"]), }; return expected; @@ -185,7 +185,7 @@ static List Expected_Quote_Inclosed_Columns() var expected = new List { new(columnNameToIndex, ["1", "2", "3"]), - new(columnNameToIndex, ["10", "20", "30"]) + new(columnNameToIndex, ["10", "20", "30"]), }; return expected; @@ -198,7 +198,7 @@ static List Expected_Quote_Inclosed_Columns_Separator_In_Text() var expected = new List { new(columnNameToIndex, ["1", "the following dates;1. jan, 1. 
april", "3"]), - new(columnNameToIndex, ["10", "20", "30"]) + new(columnNameToIndex, ["10", "20", "30"]), }; return expected; diff --git a/src/SharpLearning.InputOutput.Test/Csv/CsvRowExtensionsTest.cs b/src/SharpLearning.InputOutput.Test/Csv/CsvRowExtensionsTest.cs index 3c646d59..d8e7549c 100644 --- a/src/SharpLearning.InputOutput.Test/Csv/CsvRowExtensionsTest.cs +++ b/src/SharpLearning.InputOutput.Test/Csv/CsvRowExtensionsTest.cs @@ -11,17 +11,17 @@ namespace SharpLearning.InputOutput.Test.Csv; [TestClass] public class CsvRowExtensionsTest { - static readonly string[] m_data = ["1", "2", "3", "4"]; - static readonly Dictionary m_columnNameToIndex = new() { { "1", 0 }, { "2", 1 }, { "3", 2 }, { "4", 3 } }; - readonly F64Matrix m_expectedF64Matrix = new(m_data.Select(value => CsvRowExtensions.DefaultF64Converter(value)).ToArray(), 1, 4); - readonly StringMatrix m_expectedStringMatrix = new(m_data, 1, 4); + static readonly string[] Data = ["1", "2", "3", "4"]; + static readonly Dictionary ColumnNameToIndex = new() { { "1", 0 }, { "2", 1 }, { "3", 2 }, { "4", 3 } }; + readonly F64Matrix m_expectedF64Matrix = new(Data.Select(value => CsvRowExtensions.DefaultF64Converter(value)).ToArray(), 1, 4); + readonly StringMatrix m_expectedStringMatrix = new(Data, 1, 4); readonly string m_expectedWrite = "1;2;3;4\r\n1;2;3;4"; [TestMethod] public void CsvRowExtensions_GetValues() { - var sut = new CsvRow(m_columnNameToIndex, m_data); + var sut = new CsvRow(ColumnNameToIndex, Data); var actual = sut.GetValues(["1", "3"]); var expected = new string[] { "1", "3" }; CollectionAssert.AreEqual(expected, actual); @@ -30,7 +30,7 @@ public void CsvRowExtensions_GetValues() [TestMethod] public void CsvRowExtensions_SetValue() { - var sut = new CsvRow(m_columnNameToIndex, m_data.ToArray()); + var sut = new CsvRow(ColumnNameToIndex, Data.ToArray()); sut.SetValue("3", "33"); var actual = sut.GetValue("3"); @@ -40,7 +40,7 @@ public void CsvRowExtensions_SetValue() [TestMethod] public void 
CsvRowExtensions_GetValue() { - var sut = new CsvRow(m_columnNameToIndex, m_data); + var sut = new CsvRow(ColumnNameToIndex, Data); var actual = sut.GetValue("3"); var expected = "3"; Assert.AreEqual(expected, actual); @@ -49,7 +49,7 @@ public void CsvRowExtensions_GetValue() [TestMethod] public void CsvRowExtensions_Keep() { - var sut = new List { new(m_columnNameToIndex, m_data) }; + var sut = new List { new(ColumnNameToIndex, Data) }; var actual = sut.Keep("1", "2").ToList().First(); var expected = new CsvRow( @@ -62,7 +62,7 @@ public void CsvRowExtensions_Keep() [TestMethod] public void CsvRowExtensions_Remove() { - var sut = new List { new(m_columnNameToIndex, m_data) }; + var sut = new List { new(ColumnNameToIndex, Data) }; var actual = sut.Remove("3").ToList().First(); var expected = new CsvRow( @@ -75,7 +75,7 @@ public void CsvRowExtensions_Remove() [TestMethod] public void CsvRowExtensions_ToF64Matrix() { - var sut = new List { new(m_columnNameToIndex, m_data) }; + var sut = new List { new(ColumnNameToIndex, Data) }; var actual = sut.ToF64Matrix(); Assert.AreEqual(m_expectedF64Matrix, actual); } @@ -83,12 +83,11 @@ public void CsvRowExtensions_ToF64Matrix() [TestMethod] public void CsvRowExtensions_ToStringMatrix() { - var sut = new List { new(m_columnNameToIndex, m_data) }; + var sut = new List { new(ColumnNameToIndex, Data) }; var actual = sut.ToStringMatrix(); Assert.AreEqual(m_expectedStringMatrix, actual); } - [TestMethod] public void CsvRowExtensions_ToF64Vector() { @@ -114,13 +113,14 @@ public void CsvRowExtensions_ToStringVector() var actual = sut.EnumerateRows("one") .ToStringVector(); - CollectionAssert.AreEqual(new string[] { "1" }, actual); + var expected = new string[] { "1" }; + CollectionAssert.AreEqual(expected, actual); } [TestMethod] public void CsvRowExtensions_Write() { - var sut = new List { new(m_columnNameToIndex, m_data) }; + var sut = new List { new(ColumnNameToIndex, Data) }; var writer = new StringWriter(); sut.Write(() => 
writer); @@ -146,7 +146,6 @@ public void CsvRowExtensions_KeyCombine_KeepRepeatedColumns() var expected = "Date;Open;High;Low;Close;Volume;Adj Close;Date_1;Open_1;High_1;Low_1;Close_1;Volume_1;Adj Close_1\r\n2014-04-29;38.01;39.68;36.80;38.00;294200;38.00;2014-04-29;22.05;22.44;21.72;21.78;81900;21.78\r\n2014-04-28;38.26;39.36;37.30;37.83;361900;37.83;2014-04-28;21.79;22.00;21.46;21.90;71100;21.90\r\n2014-04-25;38.33;39.04;37.88;38.00;342900;38.00;2014-04-25;22.10;22.48;21.67;21.78;77500;21.78\r\n2014-04-24;39.33;39.59;37.91;38.82;362200;38.82;2014-04-24;22.61;22.70;22.20;22.23;48700;22.23\r\n2014-04-23;38.98;39.58;38.50;38.88;245800;38.88;2014-04-23;22.26;22.95;22.16;22.60;99400;22.60\r\n2014-04-22;38.43;39.79;38.31;38.99;358000;38.99;2014-04-22;22.19;22.70;22.13;22.48;69200;22.48\r\n2014-04-21;38.05;38.74;37.77;38.41;316800;38.41;2014-04-21;22.28;22.54;22.05;22.24;31100;22.24\r\n2014-04-17;37.25;38.24;36.92;38.05;233700;38.05;2014-04-17;22.30;22.40;22.15;22.26;47400;22.26\r\n2014-04-16;36.37;37.27;36.17;37.26;144800;37.26;2014-04-16;22.59;22.74;22.09;22.35;46600;22.35\r\n2014-04-15;36.08;36.74;35.09;36.05;223100;36.05;2014-04-15;22.46;22.74;21.95;22.35;40800;22.35\r\n2014-04-14;36.55;36.90;35.33;36.02;296100;36.02;2014-04-14;22.65;22.82;22.16;22.45;84600;22.45\r\n2014-04-11;36.26;37.09;36.08;36.13;282700;36.13;2014-04-11;22.31;22.69;22.28;22.43;66600;22.43\r\n2014-04-10;37.06;37.16;36.13;36.46;309800;36.46;2014-04-10;23.11;23.25;22.39;22.56;88800;22.56\r\n2014-04-09;36.08;37.26;35.66;37.13;209400;37.13;2014-04-09;23.15;23.30;22.95;23.18;58600;23.18\r\n2014-04-08;35.50;36.16;35.28;35.85;215700;35.85;2014-04-08;23.04;23.68;23.00;23.11;56200;23.11\r\n2014-04-07;36.49;37.30;35.27;35.48;312400;35.48;2014-04-07;23.41;23.73;23.01;23.09;61500;23.09\r\n2014-04-04;38.39;38.90;36.60;36.93;306500;36.93;2014-04-04;24.00;24.05;23.37;23.44;188500;23.44\r\n2014-04-03;38.62;39.78;37.90;38.14;269800;38.14;2014-04-03;23.97;23.97;23.77;23.90;43600;23.90\r\n2014-04-02;38.66;38.84;38.0
4;38.56;398200;38.56;2014-04-02;23.70;23.92;23.51;23.88;74700;23.88\r\n2014-04-01;37.21;38.65;36.58;38.49;410900;38.49;2014-04-01;23.34;23.87;23.13;23.75;146100;23.75"; Assert.AreEqual(expected, actual); - var actualColumnNameToIndex = rows.First().ColumnNameToIndex; var expectedColumnNameToIndex = new Dictionary { { "Date", 0 }, { "Open", 1 }, { "High", 2 }, { "Low", 3 }, { "Close", 4 }, { "Volume", 5 }, { "Adj Close", 6 }, { "Date_1", 7 }, { "Open_1", 8 }, { "High_1", 9 }, { "Low_1", 10 }, { "Close_1", 11 }, { "Volume_1", 12 }, { "Adj Close_1", 13 } }; @@ -170,7 +169,6 @@ public void CsvRowExtensions_KeyCombine() var expected = "Date;Open;High;Low;Close;Volume;Adj Close;OpenOther;CloseOther\r\n2014-04-29;38.01;39.68;36.80;38.00;294200;38.00;22.05;21.78\r\n2014-04-28;38.26;39.36;37.30;37.83;361900;37.83;21.79;21.90\r\n2014-04-25;38.33;39.04;37.88;38.00;342900;38.00;22.10;21.78\r\n2014-04-24;39.33;39.59;37.91;38.82;362200;38.82;22.61;22.23\r\n2014-04-23;38.98;39.58;38.50;38.88;245800;38.88;22.26;22.60\r\n2014-04-22;38.43;39.79;38.31;38.99;358000;38.99;22.19;22.48\r\n2014-04-21;38.05;38.74;37.77;38.41;316800;38.41;22.28;22.24\r\n2014-04-17;37.25;38.24;36.92;38.05;233700;38.05;22.30;22.26\r\n2014-04-16;36.37;37.27;36.17;37.26;144800;37.26;22.59;22.35\r\n2014-04-15;36.08;36.74;35.09;36.05;223100;36.05;22.46;22.35\r\n2014-04-14;36.55;36.90;35.33;36.02;296100;36.02;22.65;22.45\r\n2014-04-11;36.26;37.09;36.08;36.13;282700;36.13;22.31;22.43\r\n2014-04-10;37.06;37.16;36.13;36.46;309800;36.46;23.11;22.56\r\n2014-04-09;36.08;37.26;35.66;37.13;209400;37.13;23.15;23.18\r\n2014-04-08;35.50;36.16;35.28;35.85;215700;35.85;23.04;23.11\r\n2014-04-07;36.49;37.30;35.27;35.48;312400;35.48;23.41;23.09\r\n2014-04-04;38.39;38.90;36.60;36.93;306500;36.93;24.00;23.44\r\n2014-04-03;38.62;39.78;37.90;38.14;269800;38.14;23.97;23.90\r\n2014-04-02;38.66;38.84;38.04;38.56;398200;38.56;23.70;23.88\r\n2014-04-01;37.21;38.65;36.58;38.49;410900;38.49;23.34;23.75"; Assert.AreEqual(expected, actual); 
- var actualColumnNameToIndex = rows.First().ColumnNameToIndex; var expectedColumnNameToIndex = new Dictionary { { "Date", 0 }, { "Open", 1 }, { "High", 2 }, { "Low", 3 }, { "Close", 4 }, { "Volume", 5 }, { "Adj Close", 6 }, { "OpenOther", 7 }, { "CloseOther", 8 } }; @@ -194,7 +192,6 @@ public void CsvRowExtensions_KeyCombine_KeepRepeatedColumns_Dict() var expected = "Date;Open;High;Low;Close;Volume;Adj Close;Date_1;Open_1;High_1;Low_1;Close_1;Volume_1;Adj Close_1\r\n2014-04-29;38.01;39.68;36.80;38.00;294200;38.00;2014-04-29;22.05;22.44;21.72;21.78;81900;21.78\r\n2014-04-28;38.26;39.36;37.30;37.83;361900;37.83;2014-04-28;21.79;22.00;21.46;21.90;71100;21.90\r\n2014-04-25;38.33;39.04;37.88;38.00;342900;38.00;2014-04-25;22.10;22.48;21.67;21.78;77500;21.78\r\n2014-04-24;39.33;39.59;37.91;38.82;362200;38.82;2014-04-24;22.61;22.70;22.20;22.23;48700;22.23\r\n2014-04-23;38.98;39.58;38.50;38.88;245800;38.88;2014-04-23;22.26;22.95;22.16;22.60;99400;22.60\r\n2014-04-22;38.43;39.79;38.31;38.99;358000;38.99;2014-04-22;22.19;22.70;22.13;22.48;69200;22.48\r\n2014-04-21;38.05;38.74;37.77;38.41;316800;38.41;2014-04-21;22.28;22.54;22.05;22.24;31100;22.24\r\n2014-04-17;37.25;38.24;36.92;38.05;233700;38.05;2014-04-17;22.30;22.40;22.15;22.26;47400;22.26\r\n2014-04-16;36.37;37.27;36.17;37.26;144800;37.26;2014-04-16;22.59;22.74;22.09;22.35;46600;22.35\r\n2014-04-15;36.08;36.74;35.09;36.05;223100;36.05;2014-04-15;22.46;22.74;21.95;22.35;40800;22.35\r\n2014-04-14;36.55;36.90;35.33;36.02;296100;36.02;2014-04-14;22.65;22.82;22.16;22.45;84600;22.45\r\n2014-04-11;36.26;37.09;36.08;36.13;282700;36.13;2014-04-11;22.31;22.69;22.28;22.43;66600;22.43\r\n2014-04-10;37.06;37.16;36.13;36.46;309800;36.46;2014-04-10;23.11;23.25;22.39;22.56;88800;22.56\r\n2014-04-09;36.08;37.26;35.66;37.13;209400;37.13;2014-04-09;23.15;23.30;22.95;23.18;58600;23.18\r\n2014-04-08;35.50;36.16;35.28;35.85;215700;35.85;2014-04-08;23.04;23.68;23.00;23.11;56200;23.11\r\n2014-04-07;36.49;37.30;35.27;35.48;312400;35.48;2014-04-07
;23.41;23.73;23.01;23.09;61500;23.09\r\n2014-04-04;38.39;38.90;36.60;36.93;306500;36.93;2014-04-04;24.00;24.05;23.37;23.44;188500;23.44\r\n2014-04-03;38.62;39.78;37.90;38.14;269800;38.14;2014-04-03;23.97;23.97;23.77;23.90;43600;23.90\r\n2014-04-02;38.66;38.84;38.04;38.56;398200;38.56;2014-04-02;23.70;23.92;23.51;23.88;74700;23.88\r\n2014-04-01;37.21;38.65;36.58;38.49;410900;38.49;2014-04-01;23.34;23.87;23.13;23.75;146100;23.75"; Assert.AreEqual(expected, actual); - var actualColumnNameToIndex = rows.First().ColumnNameToIndex; var expectedColumnNameToIndex = new Dictionary { { "Date", 0 }, { "Open", 1 }, { "High", 2 }, { "Low", 3 }, { "Close", 4 }, { "Volume", 5 }, { "Adj Close", 6 }, { "Date_1", 7 }, { "Open_1", 8 }, { "High_1", 9 }, { "Low_1", 10 }, { "Close_1", 11 }, { "Volume_1", 12 }, { "Adj Close_1", 13 } }; @@ -218,7 +215,6 @@ public void CsvRowExtensions_KeyCombine_Dict() var expected = "Date;Open;High;Low;Close;Volume;Adj Close;OpenOther;CloseOther\r\n2014-04-29;38.01;39.68;36.80;38.00;294200;38.00;22.05;21.78\r\n2014-04-28;38.26;39.36;37.30;37.83;361900;37.83;21.79;21.90\r\n2014-04-25;38.33;39.04;37.88;38.00;342900;38.00;22.10;21.78\r\n2014-04-24;39.33;39.59;37.91;38.82;362200;38.82;22.61;22.23\r\n2014-04-23;38.98;39.58;38.50;38.88;245800;38.88;22.26;22.60\r\n2014-04-22;38.43;39.79;38.31;38.99;358000;38.99;22.19;22.48\r\n2014-04-21;38.05;38.74;37.77;38.41;316800;38.41;22.28;22.24\r\n2014-04-17;37.25;38.24;36.92;38.05;233700;38.05;22.30;22.26\r\n2014-04-16;36.37;37.27;36.17;37.26;144800;37.26;22.59;22.35\r\n2014-04-15;36.08;36.74;35.09;36.05;223100;36.05;22.46;22.35\r\n2014-04-14;36.55;36.90;35.33;36.02;296100;36.02;22.65;22.45\r\n2014-04-11;36.26;37.09;36.08;36.13;282700;36.13;22.31;22.43\r\n2014-04-10;37.06;37.16;36.13;36.46;309800;36.46;23.11;22.56\r\n2014-04-09;36.08;37.26;35.66;37.13;209400;37.13;23.15;23.18\r\n2014-04-08;35.50;36.16;35.28;35.85;215700;35.85;23.04;23.11\r\n2014-04-07;36.49;37.30;35.27;35.48;312400;35.48;23.41;23.09\r\n2014-04-04;38.39;3
8.90;36.60;36.93;306500;36.93;24.00;23.44\r\n2014-04-03;38.62;39.78;37.90;38.14;269800;38.14;23.97;23.90\r\n2014-04-02;38.66;38.84;38.04;38.56;398200;38.56;23.70;23.88\r\n2014-04-01;37.21;38.65;36.58;38.49;410900;38.49;23.34;23.75"; Assert.AreEqual(expected, actual); - var actualColumnNameToIndex = rows.First().ColumnNameToIndex; var expectedColumnNameToIndex = new Dictionary { { "Date", 0 }, { "Open", 1 }, { "High", 2 }, { "Low", 3 }, { "Close", 4 }, { "Volume", 5 }, { "Adj Close", 6 }, { "OpenOther", 7 }, { "CloseOther", 8 } }; diff --git a/src/SharpLearning.InputOutput.Test/Csv/CsvRowTest.cs b/src/SharpLearning.InputOutput.Test/Csv/CsvRowTest.cs index dc745f71..3820ba32 100644 --- a/src/SharpLearning.InputOutput.Test/Csv/CsvRowTest.cs +++ b/src/SharpLearning.InputOutput.Test/Csv/CsvRowTest.cs @@ -24,15 +24,15 @@ public void CsvRow_Constructor_data_columnNames_does_not_match() public void CsvRow_Equal() { var row = new CsvRow( - new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, + new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 } }, ["a", "b", "c"]); var equal = new CsvRow( - new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, + new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 } }, ["a", "b", "c"]); var notEqual = new CsvRow( - new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, + new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 } }, ["123", "b", "c"]); Assert.AreEqual(equal, row); @@ -43,19 +43,18 @@ public void CsvRow_Equal() public void CsvRow_Equal_Params() { var row = new CsvRow( - new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, + new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 } }, "a", "b", "c"); var equal = new CsvRow( - new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, + new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 } }, "a", "b", "c"); var notEqual = new CsvRow( - new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 }, }, + new Dictionary { { "F1", 0 }, { "F2", 0 }, { "F3", 0 } }, "123", 
"b", "c"); Assert.AreEqual(equal, row); Assert.AreNotEqual(notEqual, row); } - } diff --git a/src/SharpLearning.InputOutput.Test/Csv/CsvWriterTest.cs b/src/SharpLearning.InputOutput.Test/Csv/CsvWriterTest.cs index bdefe41a..3991d324 100644 --- a/src/SharpLearning.InputOutput.Test/Csv/CsvWriterTest.cs +++ b/src/SharpLearning.InputOutput.Test/Csv/CsvWriterTest.cs @@ -21,8 +21,8 @@ public void CsvWriter_Write() sut.Write(data); var actual = writer.ToString(); - var Expected = "AptitudeTestScore;PreviousExperience_month;Pass\r\n5;6;0\r\n1;15;0\r\n1;12;0\r\n4;6;0\r\n1;15;1\r\n1;6;0\r\n4;16;1\r\n1;10;1\r\n3;12;0\r\n4;26;1\r\n5;2;1\r\n1;12;0\r\n3;18;0\r\n3;3;0\r\n1;24;1\r\n2;8;0\r\n1;9;0\r\n4;18;0\r\n4;22;1\r\n5;3;1\r\n4;12;0\r\n4;24;1\r\n2;18;1\r\n2;6;0\r\n1;8;0\r\n5;12;0"; - Assert.AreEqual(Expected, actual); + var expected = "AptitudeTestScore;PreviousExperience_month;Pass\r\n5;6;0\r\n1;15;0\r\n1;12;0\r\n4;6;0\r\n1;15;1\r\n1;6;0\r\n4;16;1\r\n1;10;1\r\n3;12;0\r\n4;26;1\r\n5;2;1\r\n1;12;0\r\n3;18;0\r\n3;3;0\r\n1;24;1\r\n2;8;0\r\n1;9;0\r\n4;18;0\r\n4;22;1\r\n5;3;1\r\n4;12;0\r\n4;24;1\r\n2;18;1\r\n2;6;0\r\n1;8;0\r\n5;12;0"; + Assert.AreEqual(expected, actual); } [TestMethod] @@ -37,7 +37,7 @@ public void CsvWriter_Write_Append() sut.Write(data, false); var actual = writer.ToString(); - var Expected = "\r\n5;6;0\r\n1;15;0\r\n1;12;0\r\n4;6;0\r\n1;15;1\r\n1;6;0\r\n4;16;1\r\n1;10;1\r\n3;12;0\r\n4;26;1\r\n5;2;1\r\n1;12;0\r\n3;18;0\r\n3;3;0\r\n1;24;1\r\n2;8;0\r\n1;9;0\r\n4;18;0\r\n4;22;1\r\n5;3;1\r\n4;12;0\r\n4;24;1\r\n2;18;1\r\n2;6;0\r\n1;8;0\r\n5;12;0"; - Assert.AreEqual(Expected, actual); + var expected = "\r\n5;6;0\r\n1;15;0\r\n1;12;0\r\n4;6;0\r\n1;15;1\r\n1;6;0\r\n4;16;1\r\n1;10;1\r\n3;12;0\r\n4;26;1\r\n5;2;1\r\n1;12;0\r\n3;18;0\r\n3;3;0\r\n1;24;1\r\n2;8;0\r\n1;9;0\r\n4;18;0\r\n4;22;1\r\n5;3;1\r\n4;12;0\r\n4;24;1\r\n2;18;1\r\n2;6;0\r\n1;8;0\r\n5;12;0"; + Assert.AreEqual(expected, actual); } } diff --git a/src/SharpLearning.InputOutput.Test/DataSetUtilities.cs 
b/src/SharpLearning.InputOutput.Test/DataSetUtilities.cs index 53f51ab1..469123f7 100644 --- a/src/SharpLearning.InputOutput.Test/DataSetUtilities.cs +++ b/src/SharpLearning.InputOutput.Test/DataSetUtilities.cs @@ -107,5 +107,4 @@ public static class DataSetUtilities 2014-04-03;23.97;23.97;23.77;23.90;43600;23.90 2014-04-02;23.70;23.92;23.51;23.88;74700;23.88 2014-04-01;23.34;23.87;23.13;23.75;146100;23.75"; - } diff --git a/src/SharpLearning.InputOutput.Test/Serialization/GenericXmlDataContractSerializerTest.cs b/src/SharpLearning.InputOutput.Test/Serialization/GenericXmlDataContractSerializerTest.cs index 32adbe7b..7861942c 100644 --- a/src/SharpLearning.InputOutput.Test/Serialization/GenericXmlDataContractSerializerTest.cs +++ b/src/SharpLearning.InputOutput.Test/Serialization/GenericXmlDataContractSerializerTest.cs @@ -52,7 +52,6 @@ public void GenericXmlDataContractSerializer_Serialize_Dont_PreserveObjectRefere [TestMethod] public void GenericXmlDataContractSerializer_Deserialize_Dont_PreserveObjectReferences() { - var sut = new GenericXmlDataContractSerializer(preserveObjectReferences: false); var reader = new StringReader(m_serializationString_DontPreserveObjectReferences); diff --git a/src/SharpLearning.InputOutput/Csv/CsvParser.cs b/src/SharpLearning.InputOutput/Csv/CsvParser.cs index 4b5b7dcc..728eaace 100644 --- a/src/SharpLearning.InputOutput/Csv/CsvParser.cs +++ b/src/SharpLearning.InputOutput/Csv/CsvParser.cs @@ -6,7 +6,7 @@ namespace SharpLearning.InputOutput.Csv; /// -/// CsvParser +/// CsvParser /// public sealed class CsvParser { @@ -51,7 +51,9 @@ public IEnumerable EnumerateRows(Func selectColumnNames) " Column names cannot be selected in this made"); } +#pragma warning disable RCS1227 // Validate arguments correctly using var reader = m_getReader(); +#pragma warning restore RCS1227 // Validate arguments correctly var headerLine = reader.ReadLine(); var columnNameToIndex = TrimSplitLineTrimColumnsToDictionary(headerLine); var columnNames = 
columnNameToIndex.Keys.Where(name => selectColumnNames(name)) @@ -82,7 +84,10 @@ public IEnumerable EnumerateRows(params string[] columnNames) "Column names cannot be selected in this made"); } +#pragma warning disable RCS1227 // Validate arguments correctly using var reader = m_getReader(); +#pragma warning restore RCS1227 // Validate arguments correctly + var headerLine = reader.ReadLine(); var columnNameToIndex = TrimSplitLineTrimColumnsToDictionary(headerLine); var indices = columnNameToIndex.GetValues(columnNames); @@ -98,7 +103,7 @@ public IEnumerable EnumerateRows(params string[] columnNames) } /// - /// Enumerates the row of all columns in the csv file + /// Enumerates the row of all columns in the csv file /// /// public IEnumerable EnumerateRows() @@ -237,7 +242,7 @@ static string[] SplitText(string csvText, char separator) { if (!inText) { - tokens.Add(csvText.Substring(last + 1, (current - last)).Trim(' ', separator)); + tokens.Add(csvText.Substring(last + 1, current - last).Trim(' ', separator)); last = current; } } diff --git a/src/SharpLearning.InputOutput/Csv/CsvRow.cs b/src/SharpLearning.InputOutput/Csv/CsvRow.cs index c1d15959..7428c9bb 100644 --- a/src/SharpLearning.InputOutput/Csv/CsvRow.cs +++ b/src/SharpLearning.InputOutput/Csv/CsvRow.cs @@ -19,11 +19,6 @@ public class CsvRow /// public readonly Dictionary ColumnNameToIndex; - /// - /// - /// - /// - /// public CsvRow(Dictionary columnNameToIndex, params string[] data) { if (data == null) { throw new ArgumentException("row"); } @@ -33,41 +28,21 @@ public CsvRow(Dictionary columnNameToIndex, params string[] data) ColumnNameToIndex = columnNameToIndex; } - /// - /// - /// - /// - /// public bool Equals(CsvRow other) { if (!Values.SequenceEqual(other.Values)) + { return false; + } - if (!ColumnNameToIndex.SequenceEqual(other.ColumnNameToIndex)) - return false; - - return true; + return ColumnNameToIndex.SequenceEqual(other.ColumnNameToIndex); } - /// - /// - /// - /// - /// public override bool 
Equals(object obj) { - if (obj is CsvRow other) - { - return Equals(other); - } - - return false; + return obj is CsvRow other && Equals(other); } - /// - /// - /// - /// public override int GetHashCode() { unchecked // Overflow is fine, just wrap diff --git a/src/SharpLearning.InputOutput/Csv/CsvRowExtensions.cs b/src/SharpLearning.InputOutput/Csv/CsvRowExtensions.cs index a9da8e01..20d7136b 100644 --- a/src/SharpLearning.InputOutput/Csv/CsvRowExtensions.cs +++ b/src/SharpLearning.InputOutput/Csv/CsvRowExtensions.cs @@ -12,9 +12,6 @@ namespace SharpLearning.InputOutput.Csv; /// public static class CsvRowExtensions { - /// - /// - /// public static readonly Converter DefaultF64Converter = ArrayExtensions.DefaultF64Converter; /// @@ -25,7 +22,7 @@ public static class CsvRowExtensions /// public static string GetValue(this CsvRow row, string columnName) { - return row.Values[(row.ColumnNameToIndex[columnName])]; + return row.Values[row.ColumnNameToIndex[columnName]]; } /// @@ -41,7 +38,6 @@ public static void SetValue(this CsvRow row, string columnName, string value) row.Values[index] = value; } - /// /// Gets the CsvRow values based on the supplied column names /// @@ -80,7 +76,7 @@ public static IEnumerable Keep(this IEnumerable dataRows, params public static IEnumerable Remove(this IEnumerable dataRows, params string[] columnNames) { var dataRowsList = dataRows.ToList(); - var columnsToKeep = dataRowsList.First().ColumnNameToIndex.Keys.Except(columnNames).ToArray(); + var columnsToKeep = dataRowsList[0].ColumnNameToIndex.Keys.Except(columnNames).ToArray(); var index = 0; var reducedColumnNameToIndex = columnsToKeep.ToDictionary(n => n, n => index++); @@ -110,14 +106,11 @@ public static double[] ToF64Vector(this IEnumerable dataRows, Converter converter) { var dataRowsList = dataRows.ToList(); - var first = dataRowsList.First(); - - if (first.ColumnNameToIndex.Count != 1) - { - throw new ArgumentException("Vector can only be genereded from a single column"); - } + 
var first = dataRowsList[0]; - return dataRowsList.SelectMany(values => values.Values.AsF64(converter)).ToArray(); + return first.ColumnNameToIndex.Count != 1 + ? throw new ArgumentException("Vector can only be genereded from a single column") + : dataRowsList.SelectMany(values => values.Values.AsF64(converter)).ToArray(); } /// @@ -128,14 +121,11 @@ public static double[] ToF64Vector(this IEnumerable dataRows, public static string[] ToStringVector(this IEnumerable dataRows) { var dataRowsList = dataRows.ToList(); - var first = dataRowsList.First(); + var first = dataRowsList[0]; - if (first.ColumnNameToIndex.Count != 1) - { - throw new ArgumentException("Vector can only be generated from a single column"); - } - - return dataRowsList.SelectMany(values => values.Values).ToArray(); + return first.ColumnNameToIndex.Count != 1 + ? throw new ArgumentException("Vector can only be generated from a single column") + : dataRowsList.SelectMany(values => values.Values).ToArray(); } /// @@ -158,7 +148,7 @@ public static F64Matrix ToF64Matrix(this IEnumerable dataRows, Converter converter) { var dataRowsList = dataRows.ToList(); - var first = dataRowsList.First(); + var first = dataRowsList[0]; var cols = first.ColumnNameToIndex.Count; var rows = 0; @@ -179,7 +169,7 @@ public static F64Matrix ToF64Matrix(this IEnumerable dataRows, public static StringMatrix ToStringMatrix(this IEnumerable dataRows) { var dataRowsList = dataRows.ToList(); - var first = dataRowsList.First(); + var first = dataRowsList[0]; var cols = first.ColumnNameToIndex.Count; var rows = 0; @@ -193,7 +183,7 @@ public static StringMatrix ToStringMatrix(this IEnumerable dataRows) } /// - /// Enumerates a Matrix to CsvRows. + /// Enumerates a Matrix to CsvRows. /// /// /// @@ -251,7 +241,7 @@ public static void WriteFile(this IEnumerable dataRows, } /// - /// Combines two IEnumerables based on column header names. Matching rows are combined and parsed on. 
+ /// Combines two IEnumerables based on column header names. Matching rows are combined and parsed on. /// /// /// @@ -273,7 +263,10 @@ public static IEnumerable KeyCombine(this IEnumerable thisRows, foreach (var key in dictThisRows.Keys) { - if (!dictOtherRows.ContainsKey(key)) continue; + if (!dictOtherRows.ContainsKey(key)) + { + continue; + } var thisValues = dictThisRows[key].Values; var otherValues = dictOtherRows[key].Values; @@ -301,7 +294,7 @@ public static IEnumerable KeyCombine(this IEnumerable thisRows, } /// - /// Combines two IEnumerables based on a row matcher function. Matching rows are combined and parsed on. + /// Combines two IEnumerables based on a row matcher function. Matching rows are combined and parsed on. /// /// /// diff --git a/src/SharpLearning.InputOutput/Csv/CsvWriter.cs b/src/SharpLearning.InputOutput/Csv/CsvWriter.cs index 181c09ff..fb4f22c1 100644 --- a/src/SharpLearning.InputOutput/Csv/CsvWriter.cs +++ b/src/SharpLearning.InputOutput/Csv/CsvWriter.cs @@ -5,9 +5,6 @@ namespace SharpLearning.InputOutput.Csv; -/// -/// -/// public class CsvWriter { readonly Func m_writer; @@ -35,7 +32,7 @@ public void Write(IEnumerable rows, bool writeHeader = true) var rowsList = rows.ToList(); if (writeHeader) { - var headerValues = rowsList.First().ColumnNameToIndex + var headerValues = rowsList[0].ColumnNameToIndex .OrderBy(kvp => kvp.Value) .Select(kvp => kvp.Key); diff --git a/src/SharpLearning.InputOutput/Serialization/GenericBinarySerializer.cs b/src/SharpLearning.InputOutput/Serialization/GenericBinarySerializer.cs index c63138a7..c40e60cb 100644 --- a/src/SharpLearning.InputOutput/Serialization/GenericBinarySerializer.cs +++ b/src/SharpLearning.InputOutput/Serialization/GenericBinarySerializer.cs @@ -4,7 +4,6 @@ namespace SharpLearning.InputOutput.Serialization; - /// /// Generic xml serializer using BinaryFormatter /// @@ -21,9 +20,9 @@ public T Deserialize(Func reader) var serializer = new BinaryFormatter(); using var baseReader = 
reader(); - if (baseReader is StreamReader) + if (baseReader is StreamReader streamReader) { - var baseStream = ((StreamReader)baseReader).BaseStream; + var baseStream = streamReader.BaseStream; return (T)serializer.Deserialize(baseStream); } else if (baseReader is StringReader baseStream) @@ -49,9 +48,9 @@ public void Serialize(T data, Func writer) var serializer = new BinaryFormatter(); using var baseWriter = writer(); - if (baseWriter is StreamWriter) + if (baseWriter is StreamWriter streamWriter) { - var baseStream = ((StreamWriter)baseWriter).BaseStream; + var baseStream = streamWriter.BaseStream; serializer.Serialize(baseStream, data); } else if (baseWriter is StringWriter) diff --git a/src/SharpLearning.InputOutput/Serialization/GenericXmlDataContractSerializer.cs b/src/SharpLearning.InputOutput/Serialization/GenericXmlDataContractSerializer.cs index 59f4a88c..159617db 100644 --- a/src/SharpLearning.InputOutput/Serialization/GenericXmlDataContractSerializer.cs +++ b/src/SharpLearning.InputOutput/Serialization/GenericXmlDataContractSerializer.cs @@ -16,14 +16,13 @@ public sealed class GenericXmlDataContractSerializer : IGenericSerializer readonly Type[] m_knownTypes; readonly bool m_preserveObjectReferences; - /// /// Generic xml serializer using DataContractSerializer /// - /// If the serializer fails with an unknown type exception. + /// If the serializer fails with an unknown type exception. /// The necesarry types can be provided in the cosntructer. - /// This parameter controls if object references should be preserved in the serialization (default is true). - /// This adds extra information to the xml which is needed when serializing some model types. + /// This parameter controls if object references should be preserved in the serialization (default is true). + /// This adds extra information to the xml which is needed when serializing some model types. /// Currently only the SharpLearning.Neural models require this. 
public GenericXmlDataContractSerializer(Type[] knownTypes, bool preserveObjectReferences = true) { @@ -34,8 +33,8 @@ public GenericXmlDataContractSerializer(Type[] knownTypes, bool preserveObjectRe /// /// Generic xml serializer using DataContractSerializer /// - /// This parameter controls if object references should be preserved in the serialization (default is true). - /// This adds extra information to the xml which is needed when serializing some model types. + /// This parameter controls if object references should be preserved in the serialization (default is true). + /// This adds extra information to the xml which is needed when serializing some model types. /// Currently only the SharpLearning.Neural models require this. public GenericXmlDataContractSerializer(bool preserveObjectReferences = true) : this([], preserveObjectReferences) @@ -68,7 +67,7 @@ public void Serialize(T data, Func writer) MaxItemsInObjectGraph = int.MaxValue, IgnoreExtensionDataObject = false, PreserveObjectReferences = m_preserveObjectReferences, - DataContractResolver = new GenericResolver() + DataContractResolver = new GenericResolver(), }); serializer.WriteObject(xmlWriter, data); @@ -90,7 +89,7 @@ public T Deserialize(Func reader) MaxItemsInObjectGraph = int.MaxValue, IgnoreExtensionDataObject = false, PreserveObjectReferences = m_preserveObjectReferences, - DataContractResolver = new GenericResolver() + DataContractResolver = new GenericResolver(), }); return (T)serializer.ReadObject(xmlReader); @@ -108,13 +107,7 @@ internal class GenericResolver : DataContractResolver readonly Dictionary> m_typeToNames; readonly Dictionary> m_namesToType; - public Type[] KnownTypes - { - get - { - return m_typeToNames.Keys.ToArray(); - } - } + public Type[] KnownTypes => m_typeToNames.Keys.ToArray(); public GenericResolver() : this(ReflectTypes()) @@ -132,7 +125,7 @@ public GenericResolver(Type[] typesToResolve) m_typeToNames[type] = new Tuple(typeNamespace, typeName); - if 
(m_namesToType.ContainsKey(typeNamespace) == false) + if (!m_namesToType.ContainsKey(typeNamespace)) { m_namesToType[typeNamespace] = []; } @@ -218,7 +211,6 @@ static Type[] ReflectTypes() return types.ToArray(); } - static Type[] GetTypes(Assembly assembly, bool publicOnly = true) { Type[] allTypes = assembly.GetTypes(); @@ -227,17 +219,17 @@ static Type[] GetTypes(Assembly assembly, bool publicOnly = true) foreach (Type type in allTypes) { - if (type.IsEnum == false && - type.IsInterface == false && - type.IsGenericTypeDefinition == false) + if (!type.IsEnum && + !type.IsInterface && + !type.IsGenericTypeDefinition) { - if (publicOnly == true && type.IsPublic == false) + if (publicOnly && !type.IsPublic) { - if (type.IsNested == false) + if (!type.IsNested) { continue; } - if (type.IsNestedPrivate == true) + if (type.IsNestedPrivate) { continue; } diff --git a/src/SharpLearning.Metrics.Test/Classification/F1ScoreMetricTest.cs b/src/SharpLearning.Metrics.Test/Classification/F1ScoreMetricTest.cs index 474e4a4b..8a4e90cc 100644 --- a/src/SharpLearning.Metrics.Test/Classification/F1ScoreMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Classification/F1ScoreMetricTest.cs @@ -111,7 +111,7 @@ public void F1ScoreMetric_ErrorString_TargetStringMapping() var targetStringMapping = new Dictionary { { 0, "Negative" }, - { 1, "Positive" } + { 1, "Positive" }, }; var sut = new F1ScoreMetric(1); diff --git a/src/SharpLearning.Metrics.Test/Classification/PrecisionMetricTest.cs b/src/SharpLearning.Metrics.Test/Classification/PrecisionMetricTest.cs index 0520ca88..3ae39390 100644 --- a/src/SharpLearning.Metrics.Test/Classification/PrecisionMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Classification/PrecisionMetricTest.cs @@ -111,7 +111,7 @@ public void PrecisionMetric_ErrorString_TargetStringMapping() var targetStringMapping = new Dictionary { { 0, "Negative" }, - { 1, "Positive" } + { 1, "Positive" }, }; var sut = new PrecisionMetric(1); diff --git 
a/src/SharpLearning.Metrics.Test/Classification/RecallMetricTest.cs b/src/SharpLearning.Metrics.Test/Classification/RecallMetricTest.cs index 22e39e85..ae6de655 100644 --- a/src/SharpLearning.Metrics.Test/Classification/RecallMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Classification/RecallMetricTest.cs @@ -111,7 +111,7 @@ public void RecallMetric_ErrorString_TargetStringMapping() var targetStringMapping = new Dictionary { { 0, "Negative" }, - { 1, "Positive" } + { 1, "Positive" }, }; var sut = new RecallMetric(1); @@ -120,5 +120,4 @@ public void RecallMetric_ErrorString_TargetStringMapping() Assert.AreEqual(expected, actual); } - } diff --git a/src/SharpLearning.Metrics.Test/Classification/RocAucClassificationProbabilityMetricTest.cs b/src/SharpLearning.Metrics.Test/Classification/RocAucClassificationProbabilityMetricTest.cs index 8a0d7447..cc5c63cd 100644 --- a/src/SharpLearning.Metrics.Test/Classification/RocAucClassificationProbabilityMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Classification/RocAucClassificationProbabilityMetricTest.cs @@ -57,7 +57,6 @@ public void RocAucClassificationMetric_Error_Random() new ProbabilityPrediction(1.0, new Dictionary { { 0.0, random.NextDouble() }, { 1.0, random.NextDouble() } })) .ToArray(); - var sut = new RocAucClassificationProbabilityMetric(1); var actual = sut.Error(targets, probabilities); @@ -154,7 +153,7 @@ public void RocAucClassificationMetric_ErrorString_TargetStringMapping() var targetStringMapping = new Dictionary { { 0, "Negative" }, - { 1, "Positive" } + { 1, "Positive" }, }; var actual = sut.ErrorString(targets, probabilities, targetStringMapping); diff --git a/src/SharpLearning.Metrics.Test/Classification/TotalErrorClassificationMetricTest.cs b/src/SharpLearning.Metrics.Test/Classification/TotalErrorClassificationMetricTest.cs index be404faf..eb4276a1 100644 --- a/src/SharpLearning.Metrics.Test/Classification/TotalErrorClassificationMetricTest.cs +++ 
b/src/SharpLearning.Metrics.Test/Classification/TotalErrorClassificationMetricTest.cs @@ -52,7 +52,7 @@ public void TotalErrorClassificationMetric_ErrorString() var sut = new TotalErrorClassificationMetric(); var actual = sut.ErrorString(targets, predictions); - var expected = ";0;1;2;3;4;0;1;2;3;4\r\n0;1.000;0.000;0.000;0.000;0.000;100.000;0.000;0.000;0.000;0.000\r\n1;0.000;2.000;0.000;0.000;0.000;0.000;100.000;0.000;0.000;0.000\r\n2;0.000;0.000;1.000;1.000;0.000;0.000;0.000;50.000;50.000;0.000\r\n3;0.000;0.000;0.000;0.000;1.000;0.000;0.000;0.000;0.000;100.000\r\n4;0.000;0.000;0.000;0.000;1.000;0.000;0.000;0.000;0.000;100.000\r\nError: 28.571\r\n"; ; + var expected = ";0;1;2;3;4;0;1;2;3;4\r\n0;1.000;0.000;0.000;0.000;0.000;100.000;0.000;0.000;0.000;0.000\r\n1;0.000;2.000;0.000;0.000;0.000;0.000;100.000;0.000;0.000;0.000\r\n2;0.000;0.000;1.000;1.000;0.000;0.000;0.000;50.000;50.000;0.000\r\n3;0.000;0.000;0.000;0.000;1.000;0.000;0.000;0.000;0.000;100.000\r\n4;0.000;0.000;0.000;0.000;1.000;0.000;0.000;0.000;0.000;100.000\r\nError: 28.571\r\n"; Assert.AreEqual(expected, actual); } @@ -66,7 +66,7 @@ public void TotalErrorClassificationMetric_ErrorString_TargetStringMapping() var targetStringMapping = new Dictionary { { 0, "One" }, { 1, "Two" }, { 2, "Three" }, { 3, "Four" }, { 4, "Five" } }; var actual = sut.ErrorString(targets, predictions, targetStringMapping); - var expected = ";One;Two;Three;Four;Five;One;Two;Three;Four;Five\r\nOne;1.000;0.000;0.000;0.000;0.000;100.000;0.000;0.000;0.000;0.000\r\nTwo;0.000;2.000;0.000;0.000;0.000;0.000;100.000;0.000;0.000;0.000\r\nThree;0.000;0.000;1.000;1.000;0.000;0.000;0.000;50.000;50.000;0.000\r\nFour;0.000;0.000;0.000;0.000;1.000;0.000;0.000;0.000;0.000;100.000\r\nFive;0.000;0.000;0.000;0.000;1.000;0.000;0.000;0.000;0.000;100.000\r\nError: 28.571\r\n"; ; + var expected = 
";One;Two;Three;Four;Five;One;Two;Three;Four;Five\r\nOne;1.000;0.000;0.000;0.000;0.000;100.000;0.000;0.000;0.000;0.000\r\nTwo;0.000;2.000;0.000;0.000;0.000;0.000;100.000;0.000;0.000;0.000\r\nThree;0.000;0.000;1.000;1.000;0.000;0.000;0.000;50.000;50.000;0.000\r\nFour;0.000;0.000;0.000;0.000;1.000;0.000;0.000;0.000;0.000;100.000\r\nFive;0.000;0.000;0.000;0.000;1.000;0.000;0.000;0.000;0.000;100.000\r\nError: 28.571\r\n"; Assert.AreEqual(expected, actual); } diff --git a/src/SharpLearning.Metrics.Test/Regression/RootMeanLogRegressionMetricTest.cs b/src/SharpLearning.Metrics.Test/Regression/RootMeanLogRegressionMetricTest.cs index eeb1ae6c..be2902f3 100644 --- a/src/SharpLearning.Metrics.Test/Regression/RootMeanLogRegressionMetricTest.cs +++ b/src/SharpLearning.Metrics.Test/Regression/RootMeanLogRegressionMetricTest.cs @@ -27,5 +27,4 @@ public void RootMeanLogRegressionMetric_Error_Zero_Error() var actual = sut.Error(targets, predictions); Assert.AreEqual(0.0, actual); } - } diff --git a/src/SharpLearning.Metrics/Classification/ClassificationMatrix.cs b/src/SharpLearning.Metrics/Classification/ClassificationMatrix.cs index 816f7a60..7e7be83f 100644 --- a/src/SharpLearning.Metrics/Classification/ClassificationMatrix.cs +++ b/src/SharpLearning.Metrics/Classification/ClassificationMatrix.cs @@ -3,9 +3,6 @@ namespace SharpLearning.Metrics.Classification; -/// -/// -/// public static class ClassificationMatrix { /// diff --git a/src/SharpLearning.Metrics/Classification/ClassificationMatrixStringConverter.cs b/src/SharpLearning.Metrics/Classification/ClassificationMatrixStringConverter.cs index 68a0889c..8af2126e 100644 --- a/src/SharpLearning.Metrics/Classification/ClassificationMatrixStringConverter.cs +++ b/src/SharpLearning.Metrics/Classification/ClassificationMatrixStringConverter.cs @@ -1,12 +1,8 @@ using System.Collections.Generic; -using System.Linq; using System.Text; namespace SharpLearning.Metrics.Classification; -/// -/// -/// public static class 
ClassificationMatrixStringConverter { /// @@ -27,7 +23,7 @@ public static string Convert( double[,] errorMatrix, double error) { - var uniqueStringTargets = uniqueTargets.Select(t => targetStringMapping[t]).ToList(); + var uniqueStringTargets = uniqueTargets.ConvertAll(t => targetStringMapping[t]); return Convert(uniqueStringTargets, confusionMatrix, errorMatrix, error); } @@ -71,7 +67,7 @@ public static string Convert( builder.AppendLine(row); } - builder.AppendLine(string.Format("Error: {0:0.000}", 100.0 * error)); + builder.AppendFormat("Error: {0:0.000}", 100.0 * error).AppendLine(); return builder.ToString(); } diff --git a/src/SharpLearning.Metrics/Classification/F1ScoreMetric.cs b/src/SharpLearning.Metrics/Classification/F1ScoreMetric.cs index c5bd2a63..9f1cb232 100644 --- a/src/SharpLearning.Metrics/Classification/F1ScoreMetric.cs +++ b/src/SharpLearning.Metrics/Classification/F1ScoreMetric.cs @@ -69,12 +69,7 @@ double Precision(T[] targets, T[] predictions) } } - if (truePositives + falsePositves == 0) - { - return 0.0; - } - - return (double)truePositives / ((double)truePositives + (double)falsePositves); + return truePositives + falsePositves == 0 ? 0.0 : (double)truePositives / ((double)truePositives + (double)falsePositves); } double Recall(T[] targets, T[] predictions) @@ -97,12 +92,7 @@ double Recall(T[] targets, T[] predictions) } } - if (truePositives + falseNegatives == 0) - { - return 0.0; - } - - return (double)truePositives / ((double)truePositives + (double)falseNegatives); + return truePositives + falseNegatives == 0 ? 
0.0 : (double)truePositives / ((double)truePositives + (double)falseNegatives); } /// diff --git a/src/SharpLearning.Metrics/Classification/IClassificationMetric.cs b/src/SharpLearning.Metrics/Classification/IClassificationMetric.cs index c9fc0e2c..ced9acbe 100644 --- a/src/SharpLearning.Metrics/Classification/IClassificationMetric.cs +++ b/src/SharpLearning.Metrics/Classification/IClassificationMetric.cs @@ -1,6 +1,7 @@  using System.Collections.Generic; using SharpLearning.Common.Interfaces; + namespace SharpLearning.Metrics.Classification; /// diff --git a/src/SharpLearning.Metrics/Classification/IClassificationProbabilityMetric.cs b/src/SharpLearning.Metrics/Classification/IClassificationProbabilityMetric.cs index e1ecfca2..9e20bb53 100644 --- a/src/SharpLearning.Metrics/Classification/IClassificationProbabilityMetric.cs +++ b/src/SharpLearning.Metrics/Classification/IClassificationProbabilityMetric.cs @@ -9,12 +9,6 @@ namespace SharpLearning.Metrics.Classification; /// public interface IClassificationProbabilityMetric : IMetric { - /// - /// - /// - /// - /// - /// new double Error(double[] targets, ProbabilityPrediction[] predictions); /// diff --git a/src/SharpLearning.Metrics/Classification/LogLossClassificationProbabilityMetric.cs b/src/SharpLearning.Metrics/Classification/LogLossClassificationProbabilityMetric.cs index 95b3ce40..a3623531 100644 --- a/src/SharpLearning.Metrics/Classification/LogLossClassificationProbabilityMetric.cs +++ b/src/SharpLearning.Metrics/Classification/LogLossClassificationProbabilityMetric.cs @@ -7,10 +7,10 @@ namespace SharpLearning.Metrics.Classification; /// /// The logarithm of the likelihood function for a Bernoulli random distribution. 
-/// In plain English, this error metric is typically used where you have to predict that something is true or false +/// In plain English, this error metric is typically used where you have to predict that something is true or false /// with a probability (likelihood) ranging from definitely true (1) to equally true (0.5) to definitely false(0). -/// The use of log on the error provides extreme punishments for being both confident and wrong. -/// In the worst possible case, a single prediction that something is definitely true (1) +/// The use of log on the error provides extreme punishments for being both confident and wrong. +/// In the worst possible case, a single prediction that something is definitely true (1) /// when it is actually false will add infinite to your error score and make every other entry pointless. /// https://www.kaggle.com/wiki/MultiClassLogLoss /// @@ -18,21 +18,11 @@ public sealed class LogLossClassificationProbabilityMetric : IClassificationProb { readonly double m_epsilon; - /// - /// - /// - /// public LogLossClassificationProbabilityMetric(double epsilon = 1e-15) { m_epsilon = epsilon; } - /// - /// - /// - /// - /// - /// public double Error(double[] targets, ProbabilityPrediction[] predictions) { var rows = targets.Length; @@ -41,8 +31,7 @@ public double Error(double[] targets, ProbabilityPrediction[] predictions) { var probabilities = predictions[i].Probabilities; var target = targets[i]; - var probabilitySum = probabilities.Select(p => p.Value) - .Sum(); + var probabilitySum = probabilities.Sum(p => p.Value); foreach (var probability in probabilities) { diff --git a/src/SharpLearning.Metrics/Classification/PrecisionMetric.cs b/src/SharpLearning.Metrics/Classification/PrecisionMetric.cs index b406b06a..34c79906 100644 --- a/src/SharpLearning.Metrics/Classification/PrecisionMetric.cs +++ b/src/SharpLearning.Metrics/Classification/PrecisionMetric.cs @@ -31,12 +31,9 @@ public double Error(T[] targets, T[] predictions) { var uniques = 
Utilities.UniqueTargetValues(targets, predictions); - if (uniques.Count > 2) - { - throw new ArgumentException("PrecisionMetric only supports binary classification problems"); - } - - return 1.0 - Precision(targets, predictions); + return uniques.Count > 2 + ? throw new ArgumentException("PrecisionMetric only supports binary classification problems") + : 1.0 - Precision(targets, predictions); } double Precision(T[] targets, T[] predictions) @@ -61,12 +58,7 @@ double Precision(T[] targets, T[] predictions) } } - if (truePositives + falsePositves == 0) - { - return 0.0; - } - - return (double)truePositives / ((double)truePositives + (double)falsePositves); + return truePositives + falsePositves == 0 ? 0.0 : (double)truePositives / ((double)truePositives + (double)falsePositves); } /// diff --git a/src/SharpLearning.Metrics/Classification/RecallMetric.cs b/src/SharpLearning.Metrics/Classification/RecallMetric.cs index ba12cceb..9e1662b2 100644 --- a/src/SharpLearning.Metrics/Classification/RecallMetric.cs +++ b/src/SharpLearning.Metrics/Classification/RecallMetric.cs @@ -31,12 +31,9 @@ public double Error(T[] targets, T[] predictions) { var uniques = Utilities.UniqueTargetValues(targets, predictions); - if (uniques.Count > 2) - { - throw new ArgumentException("RecallMetric only supports binary classification problems"); - } - - return 1.0 - Recall(targets, predictions); + return uniques.Count > 2 + ? throw new ArgumentException("RecallMetric only supports binary classification problems") + : 1.0 - Recall(targets, predictions); } double Recall(T[] targets, T[] predictions) @@ -61,12 +58,7 @@ double Recall(T[] targets, T[] predictions) } } - if (truePositives + falseNegatives == 0) - { - return 0.0; - } - - return (double)truePositives / ((double)truePositives + (double)falseNegatives); + return truePositives + falseNegatives == 0 ? 
0.0 : (double)truePositives / ((double)truePositives + (double)falseNegatives); } /// diff --git a/src/SharpLearning.Metrics/Classification/RocAucClassificationProbabilityMetric.cs b/src/SharpLearning.Metrics/Classification/RocAucClassificationProbabilityMetric.cs index c041e80c..8b476a4d 100644 --- a/src/SharpLearning.Metrics/Classification/RocAucClassificationProbabilityMetric.cs +++ b/src/SharpLearning.Metrics/Classification/RocAucClassificationProbabilityMetric.cs @@ -55,9 +55,9 @@ public double Error(double[] targets, ProbabilityPrediction[] predictions) .ToArray(); var negativeCount = counts.Where(s => !s.Label.Equals(m_positiveTarget)) - .Select(s => s.Count).Sum(); ; + .Sum(s => s.Count); var positivesCount = counts.Where(s => s.Label.Equals(m_positiveTarget)) - .Select(s => s.Count).Sum(); + .Sum(s => s.Count); double auc = 0; double previousProbability = int.MinValue; @@ -70,7 +70,7 @@ public double Error(double[] targets, ProbabilityPrediction[] predictions) if (probability != previousProbability) { - auc += trapezoidArea( + auc += TrapezoidArea( fpCount * 1.0 / negativeCount, previousFpCount * 1.0 / negativeCount, tpCount * 1.0 / positivesCount, @@ -81,32 +81,35 @@ public double Error(double[] targets, ProbabilityPrediction[] predictions) previousTpCount = tpCount; } if (target.Equals(m_positiveTarget)) + { tpCount++; + } else + { fpCount++; + } } - auc += trapezoidArea( + auc += TrapezoidArea( 1.0, previousFpCount * 1.0 / negativeCount, 1.0, previousTpCount * 1.0 / positivesCount); return 1.0 - auc; } - /// - /// Calculate the trapezoidal area bound by the quad (X1,X2,Y1,Y2) + /// Calculate the trapezoidal area bound by the quad (X1,X2,Y1,Y2) /// - /// - /// - /// - /// + /// + /// + /// + /// /// - static double trapezoidArea(double X1, double X2, double Y1, double Y2) + static double TrapezoidArea(double x1, double x2, double y1, double y2) { - var b = Math.Abs(X1 - X2); - var height = (Y1 + Y2) / 2.0; - return (b * height); + var b = Math.Abs(x1 - 
x2); + var height = (y1 + y2) / 2.0; + return b * height; } /// diff --git a/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs b/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs index 71eb4b83..4836297b 100644 --- a/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs +++ b/src/SharpLearning.Metrics/ModelComparison/McNemarModelComparison.cs @@ -5,7 +5,7 @@ namespace SharpLearning.Metrics; /// -/// McNemar test for comparing two models. +/// McNemar test for comparing two models. /// The important part of the comparison is the number of times model1 is right where model2 is wrong and vice-versa. /// A clear improvement between two models would be if this number is, say 1 to 10. /// https://en.wikipedia.org/wiki/McNemar%27s_test diff --git a/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs b/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs index 7c09b940..8964c71b 100644 --- a/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs +++ b/src/SharpLearning.Metrics/Ranking/AveragePrecisionRankingMetric.cs @@ -32,7 +32,9 @@ public double Error(T[] targets, T[] predictions) { var length = m_k; if (predictions.Length < length) + { length = predictions.Length; + } m_workTargets.Clear(); foreach (var target in targets) @@ -49,7 +51,7 @@ public double Error(T[] targets, T[] predictions) if (m_workTargets.Contains(prediction) && !Contains(predictions, i, prediction)) { - hits += 1.0; + hits++; score += hits / (i + 1.0); } } diff --git a/src/SharpLearning.Metrics/Regression/CoefficientOfDeterminationMetric.cs b/src/SharpLearning.Metrics/Regression/CoefficientOfDeterminationMetric.cs index 5e2a5c2d..43c39455 100644 --- a/src/SharpLearning.Metrics/Regression/CoefficientOfDeterminationMetric.cs +++ b/src/SharpLearning.Metrics/Regression/CoefficientOfDeterminationMetric.cs @@ -3,9 +3,6 @@ namespace SharpLearning.Metrics.Regression; -/// -/// -/// public sealed class 
CoefficientOfDeterminationMetric : IRegressionMetric { /// @@ -22,14 +19,14 @@ public double Error(double[] targets, double[] predictions) } var targetMean = targets.Sum() / targets.Length; - var SStot = targets.Sum(target => Math.Pow(target - targetMean, 2)); - var SSres = 0.0; + var sStot = targets.Sum(target => Math.Pow(target - targetMean, 2)); + var sSres = 0.0; for (var i = 0; i < predictions.Length; i++) { - SSres += Math.Pow(targets[i] - predictions[i], 2); + sSres += Math.Pow(targets[i] - predictions[i], 2); } - return SStot != 0.0 ? 1 - SSres / SStot : 0; + return sStot != 0.0 ? 1 - sSres / sStot : 0; } } diff --git a/src/SharpLearning.Metrics/Regression/MeanAbsolutErrorRegressionMetric.cs b/src/SharpLearning.Metrics/Regression/MeanAbsolutErrorRegressionMetric.cs index b194227a..954a69c9 100644 --- a/src/SharpLearning.Metrics/Regression/MeanAbsolutErrorRegressionMetric.cs +++ b/src/SharpLearning.Metrics/Regression/MeanAbsolutErrorRegressionMetric.cs @@ -3,12 +3,12 @@ namespace SharpLearning.Metrics.Regression; /// -/// Calculates the mean absolute error between the targets and predictions e = Sum(abs(t - p))/length(t) +/// Calculates the mean absolute error between the targets and predictions e = Sum(abs(t - p))/length(t) /// public sealed class MeanAbsolutErrorRegressionMetric : IRegressionMetric { /// - /// Calculates the mean absolute error between the targets and predictions e = Sum(abs(t - p))/length(t) + /// Calculates the mean absolute error between the targets and predictions e = Sum(abs(t - p))/length(t) /// /// /// diff --git a/src/SharpLearning.Metrics/Regression/MeanSquaredErrorRegressionMetric.cs b/src/SharpLearning.Metrics/Regression/MeanSquaredErrorRegressionMetric.cs index 8dd409d2..557c5610 100644 --- a/src/SharpLearning.Metrics/Regression/MeanSquaredErrorRegressionMetric.cs +++ b/src/SharpLearning.Metrics/Regression/MeanSquaredErrorRegressionMetric.cs @@ -3,12 +3,12 @@ namespace SharpLearning.Metrics.Regression; /// -/// Calculates the 
mean squared error between the targets and predictions e = sum((t - p)^2)/length(t) +/// Calculates the mean squared error between the targets and predictions e = sum((t - p)^2)/length(t) /// public sealed class MeanSquaredErrorRegressionMetric : IRegressionMetric { /// - /// Calculates the mean squared error between the targets and predictions e = sum((t - p)^2)/length(t) + /// Calculates the mean squared error between the targets and predictions e = sum((t - p)^2)/length(t) /// /// /// diff --git a/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs b/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs index 8e0bdb8a..051548c4 100644 --- a/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs +++ b/src/SharpLearning.Metrics/Regression/NormalizedGiniCoefficientRegressionMetric.cs @@ -21,12 +21,6 @@ public double Error(double[] target, double[] predicted) return 1.0 - GiniCoefficient(target, predicted) / GiniCoefficient(target, target); } - /// - /// - /// - /// - /// - /// static double GiniCoefficient(double[] target, double[] predicted) { if (target.Length != predicted.Length) @@ -38,13 +32,13 @@ static double GiniCoefficient(double[] target, double[] predicted) var all = predicted.Zip(target, (prediction, actual) => new { actualValue = actual, - predictedValue = prediction + predictedValue = prediction, }) .Zip(Enumerable.Range(1, target.Length), (ap, i) => new { ap.actualValue, ap.predictedValue, - originalIndex = i + originalIndex = i, }) .OrderByDescending(ap => ap.predictedValue) // important to sort descending by prediction .ThenBy(ap => ap.originalIndex); // secondary sorts to ensure unambiguous orders diff --git a/src/SharpLearning.Metrics/Regression/RootMeanLogRegressionMetric.cs b/src/SharpLearning.Metrics/Regression/RootMeanLogRegressionMetric.cs index b23f4d18..8b096357 100644 --- a/src/SharpLearning.Metrics/Regression/RootMeanLogRegressionMetric.cs +++ 
b/src/SharpLearning.Metrics/Regression/RootMeanLogRegressionMetric.cs @@ -3,12 +3,12 @@ namespace SharpLearning.Metrics.Regression; /// -/// Calculates the root mean logarithmic error between the targets and predictions e = Sum(Log(t +1) - log(p +1)))/length(t) +/// Calculates the root mean logarithmic error between the targets and predictions e = Sum(Log(t +1) - log(p +1)))/length(t) /// public sealed class RootMeanLogRegressionMetric : IRegressionMetric { /// - /// Calculates the root mean logarithmic error between the targets and predictions e = Sum(Log(t +1) - log(p +1)))/length(t) + /// Calculates the root mean logarithmic error between the targets and predictions e = Sum(Log(t +1) - log(p +1)))/length(t) /// /// /// diff --git a/src/SharpLearning.Metrics/Regression/RootMeanSquarePercentageRegressionMetric.cs b/src/SharpLearning.Metrics/Regression/RootMeanSquarePercentageRegressionMetric.cs index f35f93a6..d9ff2af9 100644 --- a/src/SharpLearning.Metrics/Regression/RootMeanSquarePercentageRegressionMetric.cs +++ b/src/SharpLearning.Metrics/Regression/RootMeanSquarePercentageRegressionMetric.cs @@ -2,13 +2,10 @@ namespace SharpLearning.Metrics.Regression; -/// -/// -/// public sealed class RootMeanSquarePercentageRegressionMetric : IRegressionMetric { /// - /// Calculates the root mean square percentage error between the targets and predictions e = Sqrt(sum((t - p / t)^2)/length(t)) + /// Calculates the root mean square percentage error between the targets and predictions e = Sqrt(sum((t - p / t)^2)/length(t)) /// /// /// diff --git a/src/SharpLearning.Metrics/Regression/RootMeanSquareRegressionMetric.cs b/src/SharpLearning.Metrics/Regression/RootMeanSquareRegressionMetric.cs index fe70bf11..9d70b7e7 100644 --- a/src/SharpLearning.Metrics/Regression/RootMeanSquareRegressionMetric.cs +++ b/src/SharpLearning.Metrics/Regression/RootMeanSquareRegressionMetric.cs @@ -3,12 +3,12 @@ namespace SharpLearning.Metrics.Regression; /// -/// Calculates the root mean square 
error between the targets and predictions e = Sqrt(sum((t - p)^2)/length(t)) +/// Calculates the root mean square error between the targets and predictions e = Sqrt(sum((t - p)^2)/length(t)) /// public sealed class RootMeanSquareRegressionMetric : IRegressionMetric { /// - /// Calculates the root mean square error between the targets and predictions e = Sqrt(sum((t - p)^2)/length(t)) + /// Calculates the root mean square error between the targets and predictions e = Sqrt(sum((t - p)^2)/length(t)) /// /// /// diff --git a/src/SharpLearning.Neural.Test/Activations/SigmoidActivationTest.cs b/src/SharpLearning.Neural.Test/Activations/SigmoidActivationTest.cs index b01ac1c9..427a24e1 100644 --- a/src/SharpLearning.Neural.Test/Activations/SigmoidActivationTest.cs +++ b/src/SharpLearning.Neural.Test/Activations/SigmoidActivationTest.cs @@ -6,7 +6,6 @@ namespace SharpLearning.Neural.Test.Activations; [TestClass] public class SigmoidActivationTest { - [TestMethod] public void SigmoidActivation_Activiation() { @@ -15,7 +14,7 @@ public void SigmoidActivation_Activiation() sut.Activation(actual); var expected = new float[] { 2.06115369E-09f, 4.539787E-05f, 0.006692851f, - 0.268941432f, 0.5f,0.7310586f,0.9933072f,0.9999546f, 1 }; + 0.268941432f, 0.5f,0.7310586f,0.9933072f,0.9999546f, 1, }; Assert.AreEqual(expected[0], actual[0]); Assert.AreEqual(expected[1], actual[1]); @@ -32,7 +31,7 @@ public void SigmoidActivation_Activiation() public void SigmoidActivation_Derivative() { var activatedSigmoid = new float[] { 2.06115369E-09f, 4.539787E-05f, 0.006692851f, - 0.268941432f, 0.5f,0.7310586f,0.9933072f,0.9999546f, 1 }; + 0.268941432f, 0.5f,0.7310586f,0.9933072f,0.9999546f, 1, }; var actual = new float[9]; var sut = new SigmoidActivation(); @@ -41,7 +40,7 @@ public void SigmoidActivation_Derivative() var expected = new float[] { 2.06115369E-09f, 4.53958055E-05f, 0.00664805667f, 0.196611941f, 0.25f, 0.196611926f, 0.006648033f, - 4.54166766E-05f, 1 }; + 4.54166766E-05f, 1, }; 
Assert.AreEqual(expected[0], actual[0]); Assert.AreEqual(expected[1], actual[1]); diff --git a/src/SharpLearning.Neural.Test/Layers/BatchNormalizationLayerTest.cs b/src/SharpLearning.Neural.Test/Layers/BatchNormalizationLayerTest.cs index cb90bd8b..fd402e65 100644 --- a/src/SharpLearning.Neural.Test/Layers/BatchNormalizationLayerTest.cs +++ b/src/SharpLearning.Neural.Test/Layers/BatchNormalizationLayerTest.cs @@ -37,7 +37,6 @@ public void BatchNormalizationLayer_CopyLayerForPredictionModel() Assert.IsNull(actual.BatchColumnMeans); Assert.IsNull(actual.BatchcolumnVars); - Assert.AreEqual(sut.OutputActivations.RowCount, actual.OutputActivations.RowCount); Assert.AreEqual(sut.OutputActivations.ColumnCount, actual.OutputActivations.ColumnCount); } diff --git a/src/SharpLearning.Neural.Test/Layers/Conv2DLayerTest.cs b/src/SharpLearning.Neural.Test/Layers/Conv2DLayerTest.cs index 72cdb528..e380b936 100644 --- a/src/SharpLearning.Neural.Test/Layers/Conv2DLayerTest.cs +++ b/src/SharpLearning.Neural.Test/Layers/Conv2DLayerTest.cs @@ -13,7 +13,6 @@ namespace SharpLearning.Neural.Test.Layers; [TestClass] public class Conv2DLayerTest { - [TestMethod] public void Conv2DLayer_CopyLayerForPredictionModel() { diff --git a/src/SharpLearning.Neural.Test/Layers/MaxPool2DLayerTest.cs b/src/SharpLearning.Neural.Test/Layers/MaxPool2DLayerTest.cs index 2de5276a..3c427541 100644 --- a/src/SharpLearning.Neural.Test/Layers/MaxPool2DLayerTest.cs +++ b/src/SharpLearning.Neural.Test/Layers/MaxPool2DLayerTest.cs @@ -37,10 +37,10 @@ public void MaxPool2DLayer_CopyLayerForPredictionModel() Assert.AreEqual(sut.InputDepth, actual.InputDepth); Assert.AreEqual(sut.Switchx.Length, actual.Switchx.Length); - Assert.AreEqual(sut.Switchx.First().Length, actual.Switchx.First().Length); + Assert.AreEqual(sut.Switchx[0].Length, actual.Switchx[0].Length); Assert.AreEqual(sut.Switchy.Length, actual.Switchy.Length); - Assert.AreEqual(sut.Switchy.First().Length, actual.Switchy.First().Length); + 
Assert.AreEqual(sut.Switchy[0].Length, actual.Switchy[0].Length); Assert.AreEqual(sut.OutputActivations.RowCount, actual.OutputActivations.RowCount); Assert.AreEqual(sut.OutputActivations.ColumnCount, actual.OutputActivations.ColumnCount); diff --git a/src/SharpLearning.Neural.Test/Loss/HingeLossTest.cs b/src/SharpLearning.Neural.Test/Loss/HingeLossTest.cs index 09a48243..35bb71cd 100644 --- a/src/SharpLearning.Neural.Test/Loss/HingeLossTest.cs +++ b/src/SharpLearning.Neural.Test/Loss/HingeLossTest.cs @@ -29,5 +29,4 @@ public void HingeLoss_Loss_2() var actual = sut.Loss(targets, predictions); Assert.AreEqual(0.200000018, actual, 0.001); } - } diff --git a/src/SharpLearning.Neural.Test/MathNetExtensionsTest.cs b/src/SharpLearning.Neural.Test/MathNetExtensionsTest.cs index ad070399..7805c278 100644 --- a/src/SharpLearning.Neural.Test/MathNetExtensionsTest.cs +++ b/src/SharpLearning.Neural.Test/MathNetExtensionsTest.cs @@ -183,7 +183,6 @@ public void MathNetExtensions_Vector_Data_Modify() Assert.AreEqual(value, vector[changeIndex]); } - [TestMethod] public void MathNetExtensions_Matri_Row() { diff --git a/src/SharpLearning.Neural.Test/MatrixAsserts.cs b/src/SharpLearning.Neural.Test/MatrixAsserts.cs index 60710cae..d66f96fa 100644 --- a/src/SharpLearning.Neural.Test/MatrixAsserts.cs +++ b/src/SharpLearning.Neural.Test/MatrixAsserts.cs @@ -6,7 +6,7 @@ namespace SharpLearning.Neural.Test; public static class MatrixAsserts { /// - /// + /// /// /// /// diff --git a/src/SharpLearning.Neural.Test/Models/ClassificationNeuralNetModelTest.cs b/src/SharpLearning.Neural.Test/Models/ClassificationNeuralNetModelTest.cs index 08a7d618..8452b432 100644 --- a/src/SharpLearning.Neural.Test/Models/ClassificationNeuralNetModelTest.cs +++ b/src/SharpLearning.Neural.Test/Models/ClassificationNeuralNetModelTest.cs @@ -15,7 +15,7 @@ namespace SharpLearning.Neural.Test.Models; [TestClass] public class ClassificationNeuralNetModelTest { - readonly string m_classificationNeuralNetModelText = 
"\r\n\r\n \r\n \r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Undefined\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">5\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n 10\r\n \r\n 10\r\n \r\n 0.061048176\r\n -0.12424897\r\n 0.034037154\r\n -0.17095798\r\n -0.0293015\r\n 0.07928535\r\n 0.019817974\r\n -0.022676194\r\n 0.053380724\r\n 0.056660715\r\n \r\n \r\n <_length xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_values z:Ref=\"8\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n \r\n 10\r\n 1\r\n \r\n 1\r\n 10\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"11\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n 10\r\n 5\r\n \r\n 5\r\n 10\r\n \r\n 0.21166681\r\n 0.5034239\r\n 0.05792576\r\n 0.10567219\r\n 0.63595486\r\n -0.26405865\r\n 0.093034476\r\n -0.6748393\r\n 0.43568516\r\n -0.31619352\r\n 0.049649887\r\n -0.62087506\r\n 0.49028155\r\n 0.14165935\r\n 0.19910394\r\n -0.27401125\r\n 0.07792934\r\n -0.57351834\r\n 0.24993521\r\n 0.073334396\r\n 0.26890567\r\n -0.23734741\r\n 0.5185601\r\n -0.028178347\r\n -0.1760884\r\n -0.17318133\r\n 0.24288023\r\n 0.25036585\r\n 0.090462856\r\n -0.41986364\r\n -0.19802676\r\n 0.20760305\r\n 0.3292547\r\n 0.3825196\r\n 
0.46280676\r\n 0.2868386\r\n -0.33798853\r\n -0.29822212\r\n -0.3728843\r\n -0.102399535\r\n 0.6806794\r\n -0.07970295\r\n -0.6313027\r\n -0.11854133\r\n -0.24065013\r\n -0.22071244\r\n -0.51799536\r\n 0.7483706\r\n 0.19805476\r\n -0.04553053\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">5\r\n <_values z:Ref=\"14\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Relu\r\n <_x003C_BatchNormalization_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">false\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">10\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n \r\n \r\n 10\r\n 1\r\n \r\n 1\r\n 10\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"18\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n 10\r\n 1\r\n \r\n 1\r\n 10\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount 
xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"21\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Relu\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">10\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n \r\n \r\n \r\n 5\r\n \r\n 5\r\n \r\n 0.055229254\r\n 0.055058885\r\n -0.032196674\r\n -0.076797426\r\n -0.0029704913\r\n \r\n \r\n <_length xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">5\r\n <_values z:Ref=\"26\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n \r\n 5\r\n 1\r\n \r\n 1\r\n 5\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">5\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"29\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n 5\r\n 10\r\n \r\n 10\r\n 5\r\n \r\n 0.23901807\r\n 0.3766493\r\n 0.11987162\r\n 0.36883673\r\n -0.0039680153\r\n 0.20124118\r\n -0.49530536\r\n -0.28878102\r\n 0.64473915\r\n 0.2691261\r\n 0.34227744\r\n 0.20205034\r\n -0.5275287\r\n -0.075828865\r\n 0.51593465\r\n 0.21531579\r\n 0.59884524\r\n -0.02623786\r\n 0.2513816\r\n -0.31902543\r\n -0.07343852\r\n 0.090783365\r\n -0.30306\r\n 0.58301705\r\n 0.2896962\r\n -0.18064936\r\n 0.03864063\r\n 
0.30340064\r\n 0.09413014\r\n 0.12541048\r\n 0.16858344\r\n 0.2737634\r\n -0.47789887\r\n 0.36593747\r\n 0.07092727\r\n -0.31664324\r\n -0.19696666\r\n 0.4224245\r\n 0.046168167\r\n -0.004227043\r\n -0.033315174\r\n 0.4916748\r\n -0.39052498\r\n -0.04658396\r\n -0.65876025\r\n -0.24503362\r\n 0.26030168\r\n -0.429512\r\n -0.7081847\r\n -0.46739566\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">5\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_values z:Ref=\"32\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Undefined\r\n <_x003C_BatchNormalization_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">false\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">5\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n \r\n 5\r\n \r\n 5\r\n 1\r\n \r\n 1\r\n 5\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">5\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"36\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Undefined\r\n <_x003C_Depth_x003E_k__BackingField 
xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">5\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n GlorotUniform\r\n \r\n \r\n 0\r\n 1\r\n 2\r\n 3\r\n 4\r\n \r\n"; + readonly string m_classificationNeuralNetModelText = "\r\n\r\n \r\n \r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Undefined\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">5\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n 10\r\n \r\n 10\r\n \r\n 0.061048176\r\n -0.12424897\r\n 0.034037154\r\n -0.17095798\r\n -0.0293015\r\n 0.07928535\r\n 0.019817974\r\n -0.022676194\r\n 0.053380724\r\n 0.056660715\r\n \r\n \r\n <_length xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_values z:Ref=\"8\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n \r\n 10\r\n 1\r\n \r\n 1\r\n 10\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"11\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n 10\r\n 5\r\n \r\n 5\r\n 10\r\n \r\n 0.21166681\r\n 0.5034239\r\n 0.05792576\r\n 0.10567219\r\n 
0.63595486\r\n -0.26405865\r\n 0.093034476\r\n -0.6748393\r\n 0.43568516\r\n -0.31619352\r\n 0.049649887\r\n -0.62087506\r\n 0.49028155\r\n 0.14165935\r\n 0.19910394\r\n -0.27401125\r\n 0.07792934\r\n -0.57351834\r\n 0.24993521\r\n 0.073334396\r\n 0.26890567\r\n -0.23734741\r\n 0.5185601\r\n -0.028178347\r\n -0.1760884\r\n -0.17318133\r\n 0.24288023\r\n 0.25036585\r\n 0.090462856\r\n -0.41986364\r\n -0.19802676\r\n 0.20760305\r\n 0.3292547\r\n 0.3825196\r\n 0.46280676\r\n 0.2868386\r\n -0.33798853\r\n -0.29822212\r\n -0.3728843\r\n -0.102399535\r\n 0.6806794\r\n -0.07970295\r\n -0.6313027\r\n -0.11854133\r\n -0.24065013\r\n -0.22071244\r\n -0.51799536\r\n 0.7483706\r\n 0.19805476\r\n -0.04553053\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">5\r\n <_values z:Ref=\"14\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Relu\r\n <_x003C_BatchNormalization_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">false\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">10\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n \r\n \r\n 10\r\n 1\r\n \r\n 1\r\n 10\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount 
xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"18\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n 10\r\n 1\r\n \r\n 1\r\n 10\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"21\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Relu\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">10\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n \r\n \r\n \r\n 5\r\n \r\n 5\r\n \r\n 0.055229254\r\n 0.055058885\r\n -0.032196674\r\n -0.076797426\r\n -0.0029704913\r\n \r\n \r\n <_length xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">5\r\n <_values z:Ref=\"26\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n \r\n 5\r\n 1\r\n \r\n 1\r\n 5\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">5\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"29\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" 
/>\r\n \r\n \r\n 5\r\n 10\r\n \r\n 10\r\n 5\r\n \r\n 0.23901807\r\n 0.3766493\r\n 0.11987162\r\n 0.36883673\r\n -0.0039680153\r\n 0.20124118\r\n -0.49530536\r\n -0.28878102\r\n 0.64473915\r\n 0.2691261\r\n 0.34227744\r\n 0.20205034\r\n -0.5275287\r\n -0.075828865\r\n 0.51593465\r\n 0.21531579\r\n 0.59884524\r\n -0.02623786\r\n 0.2513816\r\n -0.31902543\r\n -0.07343852\r\n 0.090783365\r\n -0.30306\r\n 0.58301705\r\n 0.2896962\r\n -0.18064936\r\n 0.03864063\r\n 0.30340064\r\n 0.09413014\r\n 0.12541048\r\n 0.16858344\r\n 0.2737634\r\n -0.47789887\r\n 0.36593747\r\n 0.07092727\r\n -0.31664324\r\n -0.19696666\r\n 0.4224245\r\n 0.046168167\r\n -0.004227043\r\n -0.033315174\r\n 0.4916748\r\n -0.39052498\r\n -0.04658396\r\n -0.65876025\r\n -0.24503362\r\n 0.26030168\r\n -0.429512\r\n -0.7081847\r\n -0.46739566\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">5\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_values z:Ref=\"32\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Undefined\r\n <_x003C_BatchNormalization_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">false\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">5\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n \r\n 5\r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Undefined\r\n 
<_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">5\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n 5\r\n 1\r\n \r\n 1\r\n 5\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">5\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"36\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n \r\n GlorotUniform\r\n \r\n \r\n 0\r\n 1\r\n 2\r\n 3\r\n 4\r\n \r\n"; [TestMethod] public void ClassificationNeuralNetModel_Predict_Single() diff --git a/src/SharpLearning.Neural.Test/Models/RegressionNeuralNetModelTest.cs b/src/SharpLearning.Neural.Test/Models/RegressionNeuralNetModelTest.cs index c2412f39..d22349dc 100644 --- a/src/SharpLearning.Neural.Test/Models/RegressionNeuralNetModelTest.cs +++ b/src/SharpLearning.Neural.Test/Models/RegressionNeuralNetModelTest.cs @@ -14,7 +14,7 @@ namespace SharpLearning.Neural.Test.Models; [TestClass] public class RegressionNeuralNetModelTest { - readonly string m_regressionNeuralNetModelText = "\r\n\r\n \r\n \r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Undefined\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">5\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n 10\r\n \r\n 
10\r\n \r\n 0.12460524\r\n -0.09333788\r\n 0.121004716\r\n 0.056326877\r\n -0.14551695\r\n 0.15405808\r\n -0.11573758\r\n -0.04710006\r\n -0.07827293\r\n 0.20838696\r\n \r\n \r\n <_length xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_values z:Ref=\"8\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n \r\n 10\r\n 1\r\n \r\n 1\r\n 10\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"11\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n 10\r\n 5\r\n \r\n 5\r\n 10\r\n \r\n 0.17511691\r\n 0.47089887\r\n 0.035424966\r\n 0.13475606\r\n 0.67261845\r\n -0.2718503\r\n 0.10566604\r\n -0.60464674\r\n 0.43970263\r\n -0.29115033\r\n 0.10428886\r\n -0.6198461\r\n 0.48591557\r\n 0.17910019\r\n 0.32592162\r\n -0.16302188\r\n 0.1806719\r\n -0.3735993\r\n 0.36282557\r\n 0.23260261\r\n 0.23352039\r\n -0.25772256\r\n 0.3897022\r\n -0.17671733\r\n -0.4039097\r\n -0.03362496\r\n 0.24438019\r\n 0.25626516\r\n -0.037427112\r\n -0.3484412\r\n -0.12899879\r\n 0.13505302\r\n 0.35729623\r\n 0.329511\r\n 0.43211266\r\n 0.19814985\r\n -0.31468257\r\n -0.25889647\r\n -0.6299225\r\n 0.004424079\r\n 0.534701\r\n -0.10919095\r\n -0.62596995\r\n -0.26530302\r\n -0.28526264\r\n -0.13850372\r\n -0.3587619\r\n 0.7884535\r\n 0.19974217\r\n -0.044951133\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">5\r\n <_values z:Ref=\"14\" i:nil=\"true\" 
xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Relu\r\n <_x003C_BatchNormalization_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">false\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">10\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n \r\n \r\n 10\r\n 1\r\n \r\n 1\r\n 10\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"18\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n 10\r\n 1\r\n \r\n 1\r\n 10\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"21\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Relu\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">10\r\n <_x003C_Height_x003E_k__BackingField 
xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n \r\n \r\n \r\n 1\r\n \r\n 1\r\n \r\n 0.12244227\r\n \r\n \r\n <_length xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"26\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n \r\n 1\r\n 1\r\n \r\n 1\r\n 1\r\n \r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"29\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n 1\r\n 10\r\n \r\n 10\r\n 1\r\n \r\n 0.33662203\r\n 0.3745348\r\n 0.11487794\r\n 0.54076385\r\n -0.114136286\r\n 0.2804844\r\n -0.548713\r\n -0.23024562\r\n 0.47862676\r\n 0.2576068\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_values z:Ref=\"32\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Undefined\r\n <_x003C_BatchNormalization_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">false\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Height_x003E_k__BackingField 
xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n \r\n 1\r\n \r\n 1\r\n 1\r\n \r\n 1\r\n 1\r\n \r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"36\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Undefined\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n GlorotUniform\r\n \r\n"; + readonly string m_regressionNeuralNetModelText = "\r\n\r\n \r\n \r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Undefined\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">5\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n 10\r\n \r\n 10\r\n \r\n 0.12460524\r\n -0.09333788\r\n 0.121004716\r\n 0.056326877\r\n -0.14551695\r\n 0.15405808\r\n -0.11573758\r\n -0.04710006\r\n -0.07827293\r\n 0.20838696\r\n \r\n \r\n <_length 
xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_values z:Ref=\"8\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n \r\n 10\r\n 1\r\n \r\n 1\r\n 10\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"11\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n 10\r\n 5\r\n \r\n 5\r\n 10\r\n \r\n 0.17511691\r\n 0.47089887\r\n 0.035424966\r\n 0.13475606\r\n 0.67261845\r\n -0.2718503\r\n 0.10566604\r\n -0.60464674\r\n 0.43970263\r\n -0.29115033\r\n 0.10428886\r\n -0.6198461\r\n 0.48591557\r\n 0.17910019\r\n 0.32592162\r\n -0.16302188\r\n 0.1806719\r\n -0.3735993\r\n 0.36282557\r\n 0.23260261\r\n 0.23352039\r\n -0.25772256\r\n 0.3897022\r\n -0.17671733\r\n -0.4039097\r\n -0.03362496\r\n 0.24438019\r\n 0.25626516\r\n -0.037427112\r\n -0.3484412\r\n -0.12899879\r\n 0.13505302\r\n 0.35729623\r\n 0.329511\r\n 0.43211266\r\n 0.19814985\r\n -0.31468257\r\n -0.25889647\r\n -0.6299225\r\n 0.004424079\r\n 0.534701\r\n -0.10919095\r\n -0.62596995\r\n -0.26530302\r\n -0.28526264\r\n -0.13850372\r\n -0.3587619\r\n 0.7884535\r\n 0.19974217\r\n -0.044951133\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">5\r\n <_values z:Ref=\"14\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Relu\r\n 
<_x003C_BatchNormalization_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">false\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">10\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n \r\n \r\n 10\r\n 1\r\n \r\n 1\r\n 10\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"18\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n 10\r\n 1\r\n \r\n 1\r\n 10\r\n \r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"21\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Relu\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">10\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n \r\n \r\n \r\n 1\r\n \r\n 1\r\n \r\n 
0.12244227\r\n \r\n \r\n <_length xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"26\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n \r\n 1\r\n 1\r\n \r\n 1\r\n 1\r\n \r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"29\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n 1\r\n 10\r\n \r\n 10\r\n 1\r\n \r\n 0.33662203\r\n 0.3745348\r\n 0.11487794\r\n 0.54076385\r\n -0.114136286\r\n 0.2804844\r\n -0.548713\r\n -0.23024562\r\n 0.47862676\r\n 0.2576068\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">10\r\n <_values z:Ref=\"32\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n <_x003C_ActivationFunc_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Undefined\r\n <_x003C_BatchNormalization_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">false\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n \r\n \r\n 1\r\n <_x003C_ActivationFunc_x003E_k__BackingField 
xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">Undefined\r\n <_x003C_Depth_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Height_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n <_x003C_Width_x003E_k__BackingField xmlns=\"http://schemas.datacontract.org/2004/07/SharpLearning.Neural.Layers\">1\r\n \r\n \r\n 1\r\n 1\r\n \r\n 1\r\n 1\r\n \r\n 0\r\n \r\n \r\n <_columnCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_rowCount xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\">1\r\n <_values z:Ref=\"36\" i:nil=\"true\" xmlns=\"http://schemas.datacontract.org/2004/07/MathNet.Numerics.LinearAlgebra.Single\" />\r\n \r\n \r\n \r\n GlorotUniform\r\n \r\n"; [TestMethod] public void RegressionNeuralNetModel_Predict_Single() diff --git a/src/SharpLearning.Neural/Activations/Activiation.cs b/src/SharpLearning.Neural/Activations/Activiation.cs index 94653dcb..caac48e4 100644 --- a/src/SharpLearning.Neural/Activations/Activiation.cs +++ b/src/SharpLearning.Neural/Activations/Activiation.cs @@ -1,7 +1,7 @@ namespace SharpLearning.Neural.Activations; /// -/// Activation type for neural net. +/// Activation type for neural net. /// public enum Activation { @@ -14,5 +14,5 @@ public enum Activation /// Relu activation. /// Relu, - Sigmoid + Sigmoid, } diff --git a/src/SharpLearning.Neural/Activations/ReluActivation.cs b/src/SharpLearning.Neural/Activations/ReluActivation.cs index 052f8e53..b8a92109 100644 --- a/src/SharpLearning.Neural/Activations/ReluActivation.cs +++ b/src/SharpLearning.Neural/Activations/ReluActivation.cs @@ -40,9 +40,6 @@ static float Relu(float input) static float Derivative(float input) { - if (input > 0.0) - return 1.0f; - else - return 0.0f; + return input > 0.0 ? 
1.0f : 0.0f; } } diff --git a/src/SharpLearning.Neural/BorderMode.cs b/src/SharpLearning.Neural/BorderMode.cs index 8ac167f3..6e1ed13e 100644 --- a/src/SharpLearning.Neural/BorderMode.cs +++ b/src/SharpLearning.Neural/BorderMode.cs @@ -12,7 +12,7 @@ public enum BorderMode Same, /// - /// Adds no padding. Only applies the kernel within the borders of the image. + /// Adds no padding. Only applies the kernel within the borders of the image. /// Valid, diff --git a/src/SharpLearning.Neural/ConvUtils.cs b/src/SharpLearning.Neural/ConvUtils.cs index b8dbebb5..3bcb9bcb 100644 --- a/src/SharpLearning.Neural/ConvUtils.cs +++ b/src/SharpLearning.Neural/ConvUtils.cs @@ -10,7 +10,7 @@ namespace SharpLearning.Neural; public static class ConvUtils { /// - /// + /// /// /// /// @@ -46,16 +46,13 @@ public static int GetFilterGridLength(int inputLength, int filterSize, { // BorderMode.Same pads with half the filter size on both sides (one less on // the second side for an even filter size) - if (borderMode == BorderMode.Same && filterSize % 2 == 0) - { - return (int)Math.Floor((inputLength + (padding + padding - 1) - filterSize) / (double)stride + 1); - } - - return (int)Math.Floor((inputLength + padding * 2 - filterSize) / (double)stride + 1); + return borderMode == BorderMode.Same && filterSize % 2 == 0 + ? 
(int)Math.Floor((inputLength + (padding + padding - 1) - filterSize) / (double)stride + 1) + : (int)Math.Floor((inputLength + padding * 2 - filterSize) / (double)stride + 1); } /// - /// + /// /// /// /// @@ -75,7 +72,7 @@ public static float GetValueFromIndex(this Matrix m, int n, int c, int h, } /// - /// + /// /// /// /// @@ -95,7 +92,7 @@ public static int GetDataIndex(this Matrix m, int n, int c, int h, int w, } /// - /// + /// /// /// /// @@ -162,7 +159,7 @@ public static void Batch_Im2Col(Matrix data_im, int channels, int height, } /// - /// + /// /// /// /// @@ -214,7 +211,7 @@ public static void ReshapeConvolutionsToRowMajor(Matrix convoluted, } /// - /// + /// /// /// /// @@ -266,7 +263,7 @@ public static void ReshapeRowMajorToConvolutionLayout(Matrix data_convolu } /// - /// + /// /// /// /// @@ -310,7 +307,6 @@ public static void Batch_Col2Im(Matrix data_col, int channels, int height for (var w = 0; w < width_col; ++w) { - var w_pad = w * stride_w - pad_w + w_offset; if (h_pad >= 0 && h_pad < height && w_pad >= 0 && w_pad < width) { diff --git a/src/SharpLearning.Neural/Initializations/FanInFanOut.cs b/src/SharpLearning.Neural/Initializations/FanInFanOut.cs index ff5439fb..dfff9738 100644 --- a/src/SharpLearning.Neural/Initializations/FanInFanOut.cs +++ b/src/SharpLearning.Neural/Initializations/FanInFanOut.cs @@ -1,7 +1,7 @@ namespace SharpLearning.Neural.Initializations; /// -/// +/// /// public struct FanInFanOut { @@ -11,12 +11,12 @@ public struct FanInFanOut public readonly int FanIn; /// - /// THe fan-out of the layer + /// THe fan-out of the layer /// public readonly int FanOut; /// - /// + /// /// /// /// diff --git a/src/SharpLearning.Neural/Initializations/WeightInitialization.cs b/src/SharpLearning.Neural/Initializations/WeightInitialization.cs index a995a4de..ff68b4df 100644 --- a/src/SharpLearning.Neural/Initializations/WeightInitialization.cs +++ b/src/SharpLearning.Neural/Initializations/WeightInitialization.cs @@ -27,7 +27,6 @@ public 
static FanInFanOut GetFans(ILayer layer, int inputWidth, int inputHeight, var fanOut = layer.Depth * receptiveFieldSize; return new FanInFanOut(fanIn, fanOut); - } else if (layer is DenseLayer) { @@ -41,7 +40,6 @@ public static FanInFanOut GetFans(ILayer layer, int inputWidth, int inputHeight, var fanOut = (int)Math.Sqrt(layer.Width * layer.Height * layer.Depth); return new FanInFanOut(fanIn, fanOut); - } } diff --git a/src/SharpLearning.Neural/Layers/ActivationLayer.cs b/src/SharpLearning.Neural/Layers/ActivationLayer.cs index d4d8fca2..ec8259af 100644 --- a/src/SharpLearning.Neural/Layers/ActivationLayer.cs +++ b/src/SharpLearning.Neural/Layers/ActivationLayer.cs @@ -27,22 +27,22 @@ public class ActivationLayer : ILayer readonly IActivation m_activation; /// - /// + /// /// public int Width { get; set; } /// - /// + /// /// public int Height { get; set; } /// - /// + /// /// public int Depth { get; set; } /// - /// + /// /// public Activation ActivationFunc { get; set; } @@ -70,7 +70,7 @@ public ActivationLayer(Activation activation) } /// - /// + /// /// /// /// @@ -87,7 +87,7 @@ public Matrix Backward(Matrix delta) } /// - /// + /// /// /// /// @@ -102,7 +102,7 @@ public Matrix Forward(Matrix input) } /// - /// + /// /// /// /// diff --git a/src/SharpLearning.Neural/Layers/BatchNormalizationLayer.cs b/src/SharpLearning.Neural/Layers/BatchNormalizationLayer.cs index f2e8a50c..1dc8c1bd 100644 --- a/src/SharpLearning.Neural/Layers/BatchNormalizationLayer.cs +++ b/src/SharpLearning.Neural/Layers/BatchNormalizationLayer.cs @@ -16,22 +16,22 @@ namespace SharpLearning.Neural.Layers; public sealed class BatchNormalizationLayer : ILayer { /// - /// + /// /// public int Width { get; set; } /// - /// + /// /// public int Height { get; set; } /// - /// + /// /// public int Depth { get; set; } /// - /// + /// /// public Activation ActivationFunc { get; set; } @@ -73,8 +73,8 @@ public sealed class BatchNormalizationLayer : ILayer /// public Vector Bias; - Matrix 
ScaleGradients; - Vector BiasGradients; + Matrix m_scaleGradients; + Vector m_biasGradients; /// /// BatchNormalizationLayer. Batch normalization can be added to accelerate the learning process of a neural net. @@ -86,7 +86,7 @@ public BatchNormalizationLayer() } /// - /// + /// /// /// /// @@ -96,8 +96,9 @@ public Matrix Backward(Matrix delta) var diff_dst = delta; var scaleshift = Scale; var diff_src = m_delta; - var diff_scaleshift = ScaleGradients; + var diff_scaleshift = m_scaleGradients; +#pragma warning disable IDE1006 // Naming Styles var N = diff_src.RowCount; var C = Depth; var H = Height; @@ -112,20 +113,27 @@ public Matrix Backward(Matrix delta) var diff_beta = 0.0f; for (var n = 0; n < N; ++n) + { for (var h = 0; h < H; ++h) + { for (var w = 0; w < W; ++w) { diff_gamma += (src.GetValueFromIndex(n, c, h, w, Depth, Width, Height) - mean) * diff_dst.GetValueFromIndex(n, c, h, w, Depth, Width, Height); diff_beta += diff_dst.GetValueFromIndex(n, c, h, w, Depth, Width, Height); } + } + } + diff_gamma *= variance; - ScaleGradients.At(0, c, diff_gamma); - BiasGradients[c] = diff_beta; + m_scaleGradients.At(0, c, diff_gamma); + m_biasGradients[c] = diff_beta; for (var n = 0; n < N; ++n) + { for (var h = 0; h < H; ++h) + { for (var w = 0; w < W; ++w) { var diffSrcIndex = diff_src.GetDataIndex(n, c, h, w, Depth, Width, Height); @@ -135,13 +143,15 @@ public Matrix Backward(Matrix delta) * diff_gamma * variance / (W * H * N); diff_src.Data()[diffSrcIndex] *= gamma * variance; } + } + } }); - +#pragma warning restore IDE1006 // Naming Styles return m_delta; } /// - /// + /// /// /// /// @@ -151,7 +161,7 @@ public Matrix Forward(Matrix input) var src = input; var dst = OutputActivations.Data(); - +#pragma warning disable IDE1006 // Naming Styles var N = input.RowCount; // number of items in mini batch var C = Depth; var H = Height; @@ -168,18 +178,30 @@ public Matrix Forward(Matrix input) if (is_training) { for (var n = 0; n < N; ++n) + { for (var h = 0; h < H; ++h) 
+ { for (var w = 0; w < W; ++w) + { mean += src.GetValueFromIndex(n, c, h, w, Depth, Width, Height); + } + } + } + mean /= W * N * H; for (var n = 0; n < N; ++n) + { for (var h = 0; h < H; ++h) + { for (var w = 0; w < W; ++w) { var m = src.GetValueFromIndex(n, c, h, w, Depth, Width, Height) - mean; variance += m * m; } + } + } + variance = 1f / (float)Math.Sqrt(variance / (W * H * N) + eps); } else @@ -189,7 +211,9 @@ public Matrix Forward(Matrix input) } for (var n = 0; n < N; ++n) + { for (var h = 0; h < H; ++h) + { for (var w = 0; w < W; ++w) { var d_off = src.GetDataIndex(n, c, h, w, Depth, Width, Height); @@ -197,7 +221,9 @@ public Matrix Forward(Matrix input) var bias = Bias[c]; dst[d_off] = scale * (src.Data()[d_off] - mean) * variance + bias; } - + } + } +#pragma warning restore IDE1006 // Naming Styles if (is_training) { MovingAverageMeans[c] = MovingAverage(MovingAverageMeans[c], mean); @@ -223,15 +249,15 @@ static float MovingAverage(float currentValue, float value, float momentum = 0.9 /// public void AddParameresAndGradients(List parametersAndGradients) { - var scale = new ParametersAndGradients(Scale.Data(), ScaleGradients.Data()); - var bias = new ParametersAndGradients(Bias.Data(), BiasGradients.Data()); + var scale = new ParametersAndGradients(Scale.Data(), m_scaleGradients.Data()); + var bias = new ParametersAndGradients(Bias.Data(), m_biasGradients.Data()); parametersAndGradients.Add(scale); parametersAndGradients.Add(bias); } /// - /// + /// /// /// /// @@ -256,8 +282,8 @@ public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batc MovingAverageMeans = new float[inputDepth]; MovingAverageVariance = Enumerable.Range(0, inputDepth).Select(v => 1.0f).ToArray(); - ScaleGradients = Matrix.Build.Dense(1, fanOutAndIn, 1); - BiasGradients = Vector.Build.Dense(fanOutAndIn); + m_scaleGradients = Matrix.Build.Dense(1, fanOutAndIn, 1); + m_biasGradients = Vector.Build.Dense(fanOutAndIn); OutputActivations = Matrix.Build.Dense(batchSize, 
fanOutAndIn); @@ -265,7 +291,7 @@ public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batc } /// - /// + /// /// /// public void CopyLayerForPredictionModel(List layers) diff --git a/src/SharpLearning.Neural/Layers/Conv2DLayer.cs b/src/SharpLearning.Neural/Layers/Conv2DLayer.cs index 0f15f19f..217b829f 100644 --- a/src/SharpLearning.Neural/Layers/Conv2DLayer.cs +++ b/src/SharpLearning.Neural/Layers/Conv2DLayer.cs @@ -7,7 +7,7 @@ namespace SharpLearning.Neural.Layers; /// -/// 2D Convolutional layer using GEMM implementation +/// 2D Convolutional layer using GEMM implementation /// based on: https://petewarden.com/2015/04/20/why-gemm-is-at-the-heart-of-deep-learning/ /// and: https://arxiv.org/pdf/1410.0759.pdf /// @@ -15,22 +15,22 @@ namespace SharpLearning.Neural.Layers; public sealed class Conv2DLayer : ILayer, IBatchNormalizable { /// - /// + /// /// public int Width { get; set; } /// - /// + /// /// public int Height { get; set; } /// - /// + /// /// public int Depth { get; set; } /// - /// + /// /// public Activation ActivationFunc { get; set; } @@ -44,17 +44,17 @@ public sealed class Conv2DLayer : ILayer, IBatchNormalizable readonly int m_stride = 1; /// - /// + /// /// public readonly int FilterWidth; /// - /// + /// /// public readonly int FilterHeight; /// - /// + /// /// public readonly int FilterCount; @@ -79,7 +79,7 @@ public sealed class Conv2DLayer : ILayer, IBatchNormalizable public Vector BiasGradients; /// - /// + /// /// public Matrix OutputActivations; @@ -87,17 +87,17 @@ public sealed class Conv2DLayer : ILayer, IBatchNormalizable Matrix m_delta; /// - /// + /// /// public int InputHeight; /// - /// + /// /// public int InputWidth; /// - /// + /// /// public int InputDepth; @@ -118,7 +118,7 @@ public sealed class Conv2DLayer : ILayer, IBatchNormalizable public BorderMode BorderMode; /// - /// 2D Convolutional layer using GEMM implementation + /// 2D Convolutional layer using GEMM implementation /// based on: 
https://petewarden.com/2015/04/20/why-gemm-is-at-the-heart-of-deep-learning/ /// and: https://arxiv.org/pdf/1410.0759.pdf /// @@ -139,7 +139,6 @@ public Conv2DLayer(int filterWidth, int filterHeight, int filterCount, int strid if (padHeight < 0) { throw new ArgumentException("padHeight is less than 0: " + padHeight); } if (stride < 1) { throw new ArgumentException("stride is less than 0: " + stride); } - FilterWidth = filterWidth; FilterHeight = filterHeight; FilterCount = filterCount; @@ -152,7 +151,7 @@ public Conv2DLayer(int filterWidth, int filterHeight, int filterCount, int strid } /// - /// 2D Convolutional layer using GEMM implementation + /// 2D Convolutional layer using GEMM implementation /// based on: https://petewarden.com/2015/04/20/why-gemm-is-at-the-heart-of-deep-learning/ /// and: https://arxiv.org/pdf/1410.0759.pdf /// @@ -160,7 +159,7 @@ public Conv2DLayer(int filterWidth, int filterHeight, int filterCount, int strid /// The height of the filters /// The number of filters /// Controls the distance between each neighboring filter (default is 1) - /// Border mode of the convolutional operation. + /// Border mode of the convolutional operation. 
/// This will set the width and height padding automatically based on the selected border mode: Valid, Same or Full (default is Valid) /// Type of activation function used (default is Relu) public Conv2DLayer(int filterWidth, int filterHeight, int filterCount, int stride = 1, @@ -173,9 +172,8 @@ public Conv2DLayer(int filterWidth, int filterHeight, int filterCount, int strid BorderMode = borderMode; } - /// - /// + /// /// /// /// @@ -203,7 +201,7 @@ public Matrix Backward(Matrix delta) } /// - /// + /// /// /// /// @@ -229,7 +227,7 @@ public Matrix Forward(Matrix input) } /// - /// + /// /// /// /// @@ -237,7 +235,6 @@ public Matrix Forward(Matrix input) /// /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { @@ -279,7 +276,7 @@ public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batc } /// - /// + /// /// /// public void AddParameresAndGradients(List parametersAndGradients) diff --git a/src/SharpLearning.Neural/Layers/DenseLayer.cs b/src/SharpLearning.Neural/Layers/DenseLayer.cs index ab2e40bb..b77b7f4d 100644 --- a/src/SharpLearning.Neural/Layers/DenseLayer.cs +++ b/src/SharpLearning.Neural/Layers/DenseLayer.cs @@ -13,22 +13,22 @@ namespace SharpLearning.Neural.Layers; public sealed class DenseLayer : ILayer, IBatchNormalizable { /// - /// + /// /// public int Width { get; set; } /// - /// + /// /// public int Height { get; set; } /// - /// + /// /// public int Depth { get; set; } /// - /// + /// /// public Activation ActivationFunc { get; set; } @@ -110,7 +110,7 @@ public Matrix Forward(Matrix input) } /// - /// + /// /// /// /// @@ -118,7 +118,6 @@ public Matrix Forward(Matrix input) /// /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { @@ -137,7 +136,7 @@ public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batc } /// - /// + /// 
/// /// public void AddParameresAndGradients(List parametersAndGradients) diff --git a/src/SharpLearning.Neural/Layers/DropoutLayer.cs b/src/SharpLearning.Neural/Layers/DropoutLayer.cs index 9dd6c327..4ca24ccd 100644 --- a/src/SharpLearning.Neural/Layers/DropoutLayer.cs +++ b/src/SharpLearning.Neural/Layers/DropoutLayer.cs @@ -26,22 +26,22 @@ public sealed class DropoutLayer : ILayer Matrix m_delta; /// - /// + /// /// public int Width { get; set; } /// - /// + /// /// public int Height { get; set; } /// - /// + /// /// public int Depth { get; set; } /// - /// + /// /// public Activation ActivationFunc { get; set; } @@ -59,7 +59,7 @@ public DropoutLayer(double dropOut = 0.0) } /// - /// + /// /// /// public Matrix Backward(Matrix delta) @@ -71,7 +71,7 @@ public Matrix Backward(Matrix delta) } /// - /// + /// /// /// public Matrix Forward(Matrix input) @@ -83,7 +83,7 @@ public Matrix Forward(Matrix input) } /// - /// + /// /// /// /// @@ -119,7 +119,7 @@ void UpdateDropoutMask() } /// - /// + /// /// /// public void AddParameresAndGradients(List parametersAndGradients) diff --git a/src/SharpLearning.Neural/Layers/IClassificationLayer.cs b/src/SharpLearning.Neural/Layers/IClassificationLayer.cs index 553fbb45..f25d5de0 100644 --- a/src/SharpLearning.Neural/Layers/IClassificationLayer.cs +++ b/src/SharpLearning.Neural/Layers/IClassificationLayer.cs @@ -3,6 +3,4 @@ /// /// Maker interface for classification layers. /// -public interface IClassificationLayer -{ -} +public interface IClassificationLayer; diff --git a/src/SharpLearning.Neural/Layers/IOutputLayer.cs b/src/SharpLearning.Neural/Layers/IOutputLayer.cs index baa60c2f..f2914303 100644 --- a/src/SharpLearning.Neural/Layers/IOutputLayer.cs +++ b/src/SharpLearning.Neural/Layers/IOutputLayer.cs @@ -3,6 +3,4 @@ /// /// Marker interface for output layers. 
/// -public interface IOutputLayer -{ -} +public interface IOutputLayer; diff --git a/src/SharpLearning.Neural/Layers/IRegressionLayer.cs b/src/SharpLearning.Neural/Layers/IRegressionLayer.cs index bed16ec2..d7ef0ce8 100644 --- a/src/SharpLearning.Neural/Layers/IRegressionLayer.cs +++ b/src/SharpLearning.Neural/Layers/IRegressionLayer.cs @@ -3,6 +3,4 @@ /// /// Maker interface for regression layers. /// -public interface IRegressionLayer -{ -} +public interface IRegressionLayer; diff --git a/src/SharpLearning.Neural/Layers/InputLayer.cs b/src/SharpLearning.Neural/Layers/InputLayer.cs index f6e74b06..edb4d94c 100644 --- a/src/SharpLearning.Neural/Layers/InputLayer.cs +++ b/src/SharpLearning.Neural/Layers/InputLayer.cs @@ -7,33 +7,33 @@ namespace SharpLearning.Neural.Layers; /// -/// +/// /// [Serializable] public sealed class InputLayer : ILayer { /// - /// + /// /// public int Width { get; set; } /// - /// + /// /// public int Height { get; set; } /// - /// + /// /// public int Depth { get; set; } /// - /// + /// /// public Activation ActivationFunc { get; set; } /// - /// + /// /// /// public InputLayer(int inputUnits) @@ -42,7 +42,7 @@ public InputLayer(int inputUnits) } /// - /// + /// /// /// /// @@ -60,7 +60,7 @@ public InputLayer(int width, int height, int depth) } /// - /// + /// /// /// public Matrix Backward(Matrix delta) @@ -69,7 +69,7 @@ public Matrix Backward(Matrix delta) } /// - /// + /// /// /// public Matrix Forward(Matrix input) @@ -78,7 +78,7 @@ public Matrix Forward(Matrix input) } /// - /// + /// /// /// /// @@ -86,7 +86,6 @@ public Matrix Forward(Matrix input) /// /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { @@ -94,7 +93,7 @@ public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batc } /// - /// + /// /// /// public void AddParameresAndGradients(List parametersAndGradients) diff --git 
a/src/SharpLearning.Neural/Layers/MaxPool2DLayer.cs b/src/SharpLearning.Neural/Layers/MaxPool2DLayer.cs index bbca280c..5f73f5e7 100644 --- a/src/SharpLearning.Neural/Layers/MaxPool2DLayer.cs +++ b/src/SharpLearning.Neural/Layers/MaxPool2DLayer.cs @@ -15,37 +15,37 @@ namespace SharpLearning.Neural.Layers; public sealed class MaxPool2DLayer : ILayer { /// - /// + /// /// public int Width { get; set; } /// - /// + /// /// public int Height { get; set; } /// - /// + /// /// public int Depth { get; set; } /// - /// + /// /// public Activation ActivationFunc { get; set; } /// - /// + /// /// public int InputHeight; /// - /// + /// /// public int InputWidth; /// - /// + /// /// public int InputDepth; @@ -66,7 +66,7 @@ public sealed class MaxPool2DLayer : ILayer public int[][] Switchy; /// - /// + /// /// public Matrix OutputActivations; @@ -78,9 +78,9 @@ public sealed class MaxPool2DLayer : ILayer public BorderMode BorderMode; /// - /// Max pool layer. - /// The max pool layers function is to progressively reduce the spatial size of the representation - /// to reduce the amount of parameters and computation in the network. + /// Max pool layer. + /// The max pool layers function is to progressively reduce the spatial size of the representation + /// to reduce the amount of parameters and computation in the network. /// The reduction is only done on the width and height. Depth dimension is preserved. /// /// The width of the pool area (default is 2) @@ -106,15 +106,15 @@ public MaxPool2DLayer(int poolWidth, int poolHeight, int stride, int padWidth, i } /// - /// Max pool layer. - /// The max pool layers function is to progressively reduce the spatial size of the representation - /// to reduce the amount of parameters and computation in the network. + /// Max pool layer. + /// The max pool layers function is to progressively reduce the spatial size of the representation + /// to reduce the amount of parameters and computation in the network. 
/// The reduction is only done on the width and height. Depth dimension is preserved. /// /// The width of the pool area (default is 2) /// The height of the pool area (default is 2) /// Controls the distance between each neighboring pool areas (default is 2) - /// Border mode of the max pool operation. + /// Border mode of the max pool operation. /// This will set the width and height padding automatically based on the selected border mode: Valid, Same or Full (default is Valid). public MaxPool2DLayer(int poolWidth, int poolHeight, int stride = 2, BorderMode borderMode = BorderMode.Valid) @@ -126,33 +126,27 @@ public MaxPool2DLayer(int poolWidth, int poolHeight, int stride = 2, } /// - /// + /// /// /// /// public Matrix Backward(Matrix delta) { // enumerate each batch item one at a time - Parallel.For(0, delta.RowCount, i => - { - BackwardSingleItem(delta, m_delta, i); - }); + Parallel.For(0, delta.RowCount, i => BackwardSingleItem(delta, m_delta, i)); return m_delta; } /// - /// + /// /// /// /// public Matrix Forward(Matrix input) { // enumerate each batch item one at a time - Parallel.For(0, input.RowCount, i => - { - ForwardSingleItem(input, OutputActivations, i); - }); + Parallel.For(0, input.RowCount, i => ForwardSingleItem(input, OutputActivations, i)); return OutputActivations; } @@ -255,7 +249,7 @@ void BackwardSingleItem(Matrix inputGradient, Matrix outputGradien } /// - /// + /// /// /// public void AddParameresAndGradients(List parametersAndGradients) @@ -264,7 +258,7 @@ public void AddParameresAndGradients(List parametersAndG } /// - /// + /// /// /// /// @@ -272,7 +266,6 @@ public void AddParameresAndGradients(List parametersAndG /// /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { diff --git a/src/SharpLearning.Neural/Layers/SoftMaxLayer.cs b/src/SharpLearning.Neural/Layers/SoftMaxLayer.cs index ad293a6f..e8675f08 100644 --- 
a/src/SharpLearning.Neural/Layers/SoftMaxLayer.cs +++ b/src/SharpLearning.Neural/Layers/SoftMaxLayer.cs @@ -8,8 +8,8 @@ namespace SharpLearning.Neural.Layers; /// /// SoftMax Layer. -/// The Softmax classifier is the generalization of the binary logistic regression classifier to multiple classes. -/// Unlike the SVM which treats the outputs as (uncalibrated and possibly difficult to interpret) scores for each class, +/// The Softmax classifier is the generalization of the binary logistic regression classifier to multiple classes. +/// Unlike the SVM which treats the outputs as (uncalibrated and possibly difficult to interpret) scores for each class, /// the Softmax classifier gives a slightly more intuitive output (normalized class probabilities. /// However, the softmax might sacrifice accuracy in order to achieve better probabilities. /// @@ -19,37 +19,37 @@ public sealed class SoftMaxLayer , IOutputLayer , IClassificationLayer { - Matrix OutputActivations; + Matrix m_outputActivations; Matrix m_delta; /// - /// + /// /// public int NumberOfClasses; /// - /// + /// /// public int Width { get; set; } /// - /// + /// /// public int Height { get; set; } /// - /// + /// /// public int Depth { get; set; } /// - /// + /// /// public Activation ActivationFunc { get; set; } /// - /// The Softmax classifier is the generalization of the binary logistic regression classifier to multiple classes. - /// Unlike the SVM which treats the outputs as (uncalibrated and possibly difficult to interpret) scores for each class, + /// The Softmax classifier is the generalization of the binary logistic regression classifier to multiple classes. + /// Unlike the SVM which treats the outputs as (uncalibrated and possibly difficult to interpret) scores for each class, /// the Softmax classifier gives a slightly more intuitive output (normalized class probabilities. /// However, the softmax might sacrifice accuracy in order to achieve better propabilities. 
/// @@ -66,32 +66,32 @@ public SoftMaxLayer(int numberOfClasses) } /// - /// + /// /// /// public Matrix Backward(Matrix delta) { - delta.Subtract(OutputActivations, m_delta); + delta.Subtract(m_outputActivations, m_delta); m_delta.Multiply(-1f, m_delta); return m_delta; } /// - /// + /// /// /// /// public Matrix Forward(Matrix input) { - input.CopyTo(OutputActivations); - SoftMax(OutputActivations); + input.CopyTo(m_outputActivations); + SoftMax(m_outputActivations); - return OutputActivations; + return m_outputActivations; } /// - /// + /// /// /// /// @@ -99,10 +99,9 @@ public Matrix Forward(Matrix input) /// /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { - OutputActivations = Matrix.Build.Dense(batchSize, NumberOfClasses); + m_outputActivations = Matrix.Build.Dense(batchSize, NumberOfClasses); m_delta = Matrix.Build.Dense(batchSize, NumberOfClasses); } @@ -143,13 +142,13 @@ public static void SoftMax(Matrix x) for (var col = 0; col < x.ColumnCount; ++col) { var index = col * rows + row; - xData[index] = xData[index] / rowSum; + xData[index] /= rowSum; } } } /// - /// + /// /// /// public void AddParameresAndGradients(List parametersAndGradients) @@ -165,7 +164,7 @@ public void CopyLayerForPredictionModel(List layers) { var batchSize = 1; var copy = new SoftMaxLayer(NumberOfClasses); - copy.OutputActivations = Matrix.Build.Dense(batchSize, NumberOfClasses); + copy.m_outputActivations = Matrix.Build.Dense(batchSize, NumberOfClasses); layers.Add(copy); } diff --git a/src/SharpLearning.Neural/Layers/SquaredErrorRegressionLayer.cs b/src/SharpLearning.Neural/Layers/SquaredErrorRegressionLayer.cs index ee20ac13..fabda257 100644 --- a/src/SharpLearning.Neural/Layers/SquaredErrorRegressionLayer.cs +++ b/src/SharpLearning.Neural/Layers/SquaredErrorRegressionLayer.cs @@ -12,31 +12,31 @@ namespace SharpLearning.Neural.Layers; [Serializable] public sealed class 
SquaredErrorRegressionLayer : ILayer, IOutputLayer, IRegressionLayer { - Matrix OutputActivations; + Matrix m_outputActivations; Matrix m_delta; /// - /// + /// /// public int NumberOfTargets; /// - /// + /// /// public int Width { get; set; } /// - /// + /// /// public int Height { get; set; } /// - /// + /// /// public int Depth { get; set; } /// - /// + /// /// public Activation ActivationFunc { get; set; } @@ -56,14 +56,14 @@ public SquaredErrorRegressionLayer(int numberOfTargets = 1) } /// - /// + /// /// /// /// public Matrix Backward(Matrix delta) { var targetsArray = delta.Data(); - var predictionsArray = OutputActivations.Data(); + var predictionsArray = m_outputActivations.Data(); var deltaData = m_delta.Data(); for (var i = 0; i < targetsArray.Length; i++) @@ -75,18 +75,18 @@ public Matrix Backward(Matrix delta) } /// - /// + /// /// /// /// public Matrix Forward(Matrix input) { - input.CopyTo(OutputActivations); // do nothing, output raw scores - return OutputActivations; + input.CopyTo(m_outputActivations); // do nothing, output raw scores + return m_outputActivations; } /// - /// + /// /// /// /// @@ -94,23 +94,22 @@ public Matrix Forward(Matrix input) /// /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { - OutputActivations = Matrix.Build.Dense(batchSize, NumberOfTargets); + m_outputActivations = Matrix.Build.Dense(batchSize, NumberOfTargets); m_delta = Matrix.Build.Dense(batchSize, NumberOfTargets); } /// - /// + /// /// /// public void CopyLayerForPredictionModel(List layers) { var batchSize = 1; var copy = new SquaredErrorRegressionLayer(NumberOfTargets); - copy.OutputActivations = Matrix.Build.Dense(batchSize, NumberOfTargets); + copy.m_outputActivations = Matrix.Build.Dense(batchSize, NumberOfTargets); layers.Add(copy); } diff --git a/src/SharpLearning.Neural/Layers/SvmLayer.cs b/src/SharpLearning.Neural/Layers/SvmLayer.cs index bd75dc7f..6151ec60 
100644 --- a/src/SharpLearning.Neural/Layers/SvmLayer.cs +++ b/src/SharpLearning.Neural/Layers/SvmLayer.cs @@ -8,7 +8,7 @@ namespace SharpLearning.Neural.Layers; /// /// SvmLayer. -/// Because the SVM is a margin classifier, it is happy once the margins are satisfied +/// Because the SVM is a margin classifier, it is happy once the margins are satisfied /// and it does not micromanage the exact scores beyond this constraint. /// This can be an advantage when the overall goal is the best possible accuracy. And probability estimates is less important. /// @@ -18,36 +18,36 @@ public sealed class SvmLayer , IOutputLayer , IClassificationLayer { - Matrix OutputActivations; + Matrix m_outputActivations; Matrix m_delta; /// - /// + /// /// public int NumberOfClasses; /// - /// + /// /// public int Width { get; set; } /// - /// + /// /// public int Height { get; set; } /// - /// + /// /// public int Depth { get; set; } /// - /// + /// /// public Activation ActivationFunc { get; set; } /// - /// Because the SVM is a margin classifier, it is happy once the margins are satisfied + /// Because the SVM is a margin classifier, it is happy once the margins are satisfied /// and it does not micromanage the exact scores beyond this constraint. /// This can be an advantage when the overall goal is the best possible accuracy. And probability estimates is less important. 
/// @@ -64,7 +64,7 @@ public SvmLayer(int numberOfClasses) } /// - /// + /// /// /// public Matrix Backward(Matrix delta) @@ -87,17 +87,17 @@ public Matrix Backward(Matrix delta) } } - var maxTargetScore = OutputActivations.At(batchItem, maxTargetIndex); - for (var i = 0; i < OutputActivations.ColumnCount; i++) + var maxTargetScore = m_outputActivations.At(batchItem, maxTargetIndex); + for (var i = 0; i < m_outputActivations.ColumnCount; i++) { if (i == maxTargetIndex) { continue; } // The score of the target should be higher than he score of any other class, by a margin - var diff = -maxTargetScore + OutputActivations.At(batchItem, i) + margin; + var diff = -maxTargetScore + m_outputActivations.At(batchItem, i) + margin; if (diff > 0) { - m_delta[batchItem, i] += 1; - m_delta[batchItem, maxTargetIndex] -= 1; + m_delta[batchItem, i]++; + m_delta[batchItem, maxTargetIndex]--; } } } @@ -106,18 +106,18 @@ public Matrix Backward(Matrix delta) } /// - /// + /// /// /// /// public Matrix Forward(Matrix input) { - input.CopyTo(OutputActivations); // do nothing, output raw scores - return OutputActivations; + input.CopyTo(m_outputActivations); // do nothing, output raw scores + return m_outputActivations; } /// - /// + /// /// /// /// @@ -125,11 +125,10 @@ public Matrix Forward(Matrix input) /// /// /// - public void Initialize(int inputWidth, int inputHeight, int inputDepth, int batchSize, Initialization initializtion, Random random) { - OutputActivations = Matrix.Build.Dense(batchSize, NumberOfClasses); + m_outputActivations = Matrix.Build.Dense(batchSize, NumberOfClasses); m_delta = Matrix.Build.Dense(batchSize, NumberOfClasses); } @@ -150,7 +149,7 @@ public void CopyLayerForPredictionModel(List layers) { var batchSize = 1; var copy = new SvmLayer(NumberOfClasses); - copy.OutputActivations = Matrix.Build.Dense(batchSize, NumberOfClasses); + copy.m_outputActivations = Matrix.Build.Dense(batchSize, NumberOfClasses); layers.Add(copy); } diff --git 
a/src/SharpLearning.Neural/Learners/ClassificationNeuralNetLearner.cs b/src/SharpLearning.Neural/Learners/ClassificationNeuralNetLearner.cs index 2a19ca44..b255a9e1 100644 --- a/src/SharpLearning.Neural/Learners/ClassificationNeuralNetLearner.cs +++ b/src/SharpLearning.Neural/Learners/ClassificationNeuralNetLearner.cs @@ -12,7 +12,7 @@ namespace SharpLearning.Neural.Learners; /// -/// ClassificationNeuralNet learner using mini-batch gradient descent. +/// ClassificationNeuralNet learner using mini-batch gradient descent. /// Several optimization methods is available through the constructor. /// public sealed class ClassificationNeuralNetLearner @@ -24,7 +24,7 @@ public sealed class ClassificationNeuralNetLearner readonly NeuralNetLearner m_learner; /// - /// ClassificationNeuralNet learner using mini-batch gradient descent. + /// ClassificationNeuralNet learner using mini-batch gradient descent. /// Several optimization methods is available through the constructor. /// /// The neural net to learn diff --git a/src/SharpLearning.Neural/Learners/NeuralNetLearner.cs b/src/SharpLearning.Neural/Learners/NeuralNetLearner.cs index c63eab0b..3b708a65 100644 --- a/src/SharpLearning.Neural/Learners/NeuralNetLearner.cs +++ b/src/SharpLearning.Neural/Learners/NeuralNetLearner.cs @@ -33,7 +33,7 @@ public class NeuralNetLearner /// Neural net learner. Controls the learning process using mini-batch gradient descent. /// /// The neural net to learn - /// Controls how the training targets should be decoded. + /// Controls how the training targets should be decoded. /// This is different depending on if the net should be used for regression or classification. /// The loss measured and shown between each iteration /// Controls the step size when updating the weights. 
(Default is 0.001) @@ -251,7 +251,7 @@ public NeuralNet Learn(F64Matrix observations, double[] targets, int[] indices, timer.Stop(); Trace.WriteLine(string.Format("Iteration: {0:000} - Loss {1:0.00000} - Validation: {2:0.00000} - Time (ms): {3}", - (iteration + 1), currentLoss, validationLoss, timer.ElapsedMilliseconds)); + iteration + 1, currentLoss, validationLoss, timer.ElapsedMilliseconds)); if (validationLoss < bestLoss) { @@ -264,7 +264,7 @@ public NeuralNet Learn(F64Matrix observations, double[] targets, int[] indices, timer.Stop(); Trace.WriteLine(string.Format("Iteration: {0:000} - Loss {1:0.00000} - Time (ms): {2}", - (iteration + 1), currentLoss, timer.ElapsedMilliseconds)); + iteration + 1, currentLoss, timer.ElapsedMilliseconds)); } if (double.IsNaN(currentLoss)) @@ -274,14 +274,7 @@ public NeuralNet Learn(F64Matrix observations, double[] targets, int[] indices, } } - if (earlyStopping) - { - return bestNeuralNet; - } - else - { - return m_net.CopyNetForPredictionModel(); - } + return earlyStopping ? bestNeuralNet : m_net.CopyNetForPredictionModel(); } static void SetupLinerAlgebraProvider() diff --git a/src/SharpLearning.Neural/Learners/RegressionNeuralNetLearner.cs b/src/SharpLearning.Neural/Learners/RegressionNeuralNetLearner.cs index 6b56d231..2ba37145 100644 --- a/src/SharpLearning.Neural/Learners/RegressionNeuralNetLearner.cs +++ b/src/SharpLearning.Neural/Learners/RegressionNeuralNetLearner.cs @@ -11,14 +11,14 @@ namespace SharpLearning.Neural.Learners; /// -/// RegressionNeuralNet learner using mini-batch gradient descent. +/// RegressionNeuralNet learner using mini-batch gradient descent. /// public sealed class RegressionNeuralNetLearner : IIndexedLearner, ILearner { readonly NeuralNetLearner m_learner; /// - /// RegressionNeuralNet learner using mini-batch gradient descent. + /// RegressionNeuralNet learner using mini-batch gradient descent. /// Several optimization methods is available through the constructor. 
/// /// The neural net to learn diff --git a/src/SharpLearning.Neural/Loss/LogLoss.cs b/src/SharpLearning.Neural/Loss/LogLoss.cs index 66304912..5231058e 100644 --- a/src/SharpLearning.Neural/Loss/LogLoss.cs +++ b/src/SharpLearning.Neural/Loss/LogLoss.cs @@ -5,7 +5,7 @@ namespace SharpLearning.Neural.Loss; /// /// Log loss for neuralnet learner. -/// This error metric is used when one needs to predict that something is true or false with a probability (likelihood) +/// This error metric is used when one needs to predict that something is true or false with a probability (likelihood) /// ranging from definitely true (1) to equally true (0.5) to definitely false(0). /// The use of log on the error provides extreme punishments for being both confident and wrong. /// https://www.kaggle.com/wiki/LogarithmicLoss @@ -14,7 +14,7 @@ public sealed class LogLoss : ILoss { /// /// returns log los - /// This error metric is used when one needs to predict that something is true or false with a probability (likelihood) + /// This error metric is used when one needs to predict that something is true or false with a probability (likelihood) /// ranging from definitely true (1) to equally true (0.5) to definitely false(0). /// The use of log on the error provides extreme punishments for being both confident and wrong. /// diff --git a/src/SharpLearning.Neural/Loss/SquareLoss.cs b/src/SharpLearning.Neural/Loss/SquareLoss.cs index cddba93e..251676c5 100644 --- a/src/SharpLearning.Neural/Loss/SquareLoss.cs +++ b/src/SharpLearning.Neural/Loss/SquareLoss.cs @@ -3,7 +3,7 @@ namespace SharpLearning.Neural.Loss; /// -/// Square loss for for neuralnet learner. +/// Square loss for neuralnet learner. /// The square loss function is the standard method of fitting regression models. /// The square loss is however sensitive to outliers since it weighs larger errors more heavily than small ones. 
/// diff --git a/src/SharpLearning.Neural/MathNetExtensions.cs b/src/SharpLearning.Neural/MathNetExtensions.cs index b850ab9d..97527e3c 100644 --- a/src/SharpLearning.Neural/MathNetExtensions.cs +++ b/src/SharpLearning.Neural/MathNetExtensions.cs @@ -172,7 +172,7 @@ public static float ElementWiseMultiplicationSum(this Matrix m1, Matrix - /// + /// /// /// /// @@ -192,12 +192,12 @@ public static void ColumnWiseMean(this Matrix m, Vector v) vData[col] += mData[mIndex]; } - vData[col] = vData[col] / (float)m.RowCount; + vData[col] /= (float)m.RowCount; } } /// - /// Sums the columns of m into the vector sums. + /// Sums the columns of m into the vector sums. /// /// /// @@ -207,7 +207,7 @@ public static void SumColumns(this Matrix m, Vector sums) } /// - /// Sums the columns of m into the vector sums. + /// Sums the columns of m into the vector sums. /// /// /// @@ -229,7 +229,7 @@ public static void SumColumns(this Matrix m, float[] sums) } /// - /// Sums the rows of m into the vector sums. + /// Sums the rows of m into the vector sums. /// /// /// @@ -239,7 +239,7 @@ public static void SumRows(this Matrix m, Vector sums) } /// - /// Sums the rows of m into the vector sums. + /// Sums the rows of m into the vector sums. /// /// /// @@ -293,7 +293,7 @@ public static void Row(this Matrix m, int rowIndex, float[] row) } /// - /// Gets the underlying data array from the matrix. + /// Gets the underlying data array from the matrix. /// Data is stored as Column-Major. /// /// @@ -304,7 +304,7 @@ public static float[] Data(this Matrix m) } /// - /// Gets the underlying data array from the vector. + /// Gets the underlying data array from the vector. 
/// /// /// diff --git a/src/SharpLearning.Neural/Models/ClassificationNeuralNetModel.cs b/src/SharpLearning.Neural/Models/ClassificationNeuralNetModel.cs index 1b49b0e8..171345ca 100644 --- a/src/SharpLearning.Neural/Models/ClassificationNeuralNetModel.cs +++ b/src/SharpLearning.Neural/Models/ClassificationNeuralNetModel.cs @@ -9,7 +9,6 @@ namespace SharpLearning.Neural.Models; - /// /// Classification neural net model. /// @@ -99,7 +98,6 @@ public ProbabilityPrediction PredictProbability(double[] observation) for (var i = 0; i < m_targetNames.Length; i++) { - probabilityDictionary.Add(m_targetNames[i], probabilities[0, i]); if (probabilities[0, i] > probability) { @@ -164,7 +162,7 @@ public static ClassificationNeuralNetModel Load(Func reader) var types = new Type[] { typeof(DenseVectorStorage), - typeof(DenseColumnMajorMatrixStorage) + typeof(DenseColumnMajorMatrixStorage), }; return new GenericXmlDataContractSerializer(types) @@ -180,7 +178,7 @@ public void Save(Func writer) var types = new Type[] { typeof(DenseVectorStorage), - typeof(DenseColumnMajorMatrixStorage) + typeof(DenseColumnMajorMatrixStorage), }; new GenericXmlDataContractSerializer(types) diff --git a/src/SharpLearning.Neural/Models/NeuralNet.cs b/src/SharpLearning.Neural/Models/NeuralNet.cs index a4e17f74..4e20ed01 100644 --- a/src/SharpLearning.Neural/Models/NeuralNet.cs +++ b/src/SharpLearning.Neural/Models/NeuralNet.cs @@ -22,7 +22,7 @@ public sealed class NeuralNet readonly Initialization m_initialization; /// - /// + /// /// /// Initialization type for the layers with weights (default is GlorotUniform) public NeuralNet(Initialization initialization = Initialization.GlorotUniform) @@ -32,7 +32,7 @@ public NeuralNet(Initialization initialization = Initialization.GlorotUniform) } /// - /// + /// /// /// NeuralNet(List layers) @@ -41,7 +41,7 @@ public NeuralNet(Initialization initialization = Initialization.GlorotUniform) } /// - /// + /// /// /// public void Add(ILayer layer) @@ -75,7 +75,7 @@ 
public void Add(ILayer layer) } /// - /// + /// /// /// public void Backward(Matrix delta) @@ -87,7 +87,7 @@ public void Backward(Matrix delta) } /// - /// + /// /// /// /// @@ -102,7 +102,6 @@ public Matrix Forward(Matrix input) return activation; } - /// /// Forwards each observations from input and stores the results in output. /// @@ -124,16 +123,16 @@ public void Forward(Matrix input, Matrix output) } /// - /// Initializes the layers in the neural net (Instantiates members and creates random initialization of weights). + /// Initializes the layers in the neural net (Instantiates members and creates random initialization of weights). /// /// /// public void Initialize(int batchSize, Random random) { - if (Layers.First() is not InputLayer) + if (Layers[0] is not InputLayer) { throw new ArgumentException("First layer must be InputLayer. Was: " + - Layers.First().GetType().Name); + Layers[0].GetType().Name); } if (Layers.Last() is not IOutputLayer) @@ -167,7 +166,7 @@ public List GetParametersAndGradients() /// public double[] GetRawVariableImportance() { - var inputlayer = Layers.First(); + var inputlayer = Layers[0]; return new double[inputlayer.Width * inputlayer.Height * inputlayer.Depth]; } @@ -201,7 +200,7 @@ public NeuralNet CopyNetForPredictionModel() } /// - /// Creates a neural net from already initialized layers. + /// Creates a neural net from already initialized layers. /// This means that layer.Initialize will not be called. /// /// diff --git a/src/SharpLearning.Neural/Models/RegressionNeuralNetModel.cs b/src/SharpLearning.Neural/Models/RegressionNeuralNetModel.cs index 2a94b2e8..5cb3dd98 100644 --- a/src/SharpLearning.Neural/Models/RegressionNeuralNetModel.cs +++ b/src/SharpLearning.Neural/Models/RegressionNeuralNetModel.cs @@ -9,7 +9,6 @@ namespace SharpLearning.Neural.Models; - /// /// Regression neural net model. 
/// @@ -99,7 +98,7 @@ public static RegressionNeuralNetModel Load(Func reader) var types = new Type[] { typeof(DenseVectorStorage), - typeof(DenseColumnMajorMatrixStorage) + typeof(DenseColumnMajorMatrixStorage), }; return new GenericXmlDataContractSerializer(types) @@ -115,7 +114,7 @@ public void Save(Func writer) var types = new Type[] { typeof(DenseVectorStorage), - typeof(DenseColumnMajorMatrixStorage) + typeof(DenseColumnMajorMatrixStorage), }; new GenericXmlDataContractSerializer(types) diff --git a/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs b/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs index 66fa4f3a..b1d207b1 100644 --- a/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs +++ b/src/SharpLearning.Neural/Optimizers/NeuralNetOptimizer.cs @@ -7,7 +7,7 @@ namespace SharpLearning.Neural.Optimizers; /// /// Neural net optimizer for controlling the weight updates in neural net learning. -/// uses mini-batch stochastic gradient descent. +/// uses mini-batch stochastic gradient descent. /// Several different optimization methods is available through the constructor. /// public sealed class NeuralNetOptimizer @@ -41,7 +41,7 @@ public sealed class NeuralNetOptimizer /// /// Neural net optimizer for controlling the weight updates in neural net learning. - /// uses mini-batch stochastic gradient descent. + /// uses mini-batch stochastic gradient descent. /// Several different optimization methods is available through the constructor. /// /// Controls the step size when updating the weights. 
(Default is 0.01) @@ -235,7 +235,7 @@ void UpdateParam(int i, float[] parameters, float[] gradients, double l2Decay, d break; case OptimizerMethod.Adagrad: { - gsumi[j] = gsumi[j] + gij * gij; + gsumi[j] += gij * gij; var dx = -m_learningRate * gij / Math.Sqrt(gsumi[j] + m_eps); parameters[j] += (float)dx; } diff --git a/src/SharpLearning.Neural/Optimizers/OptimizerMethod.cs b/src/SharpLearning.Neural/Optimizers/OptimizerMethod.cs index 2d4fbd67..568ae553 100644 --- a/src/SharpLearning.Neural/Optimizers/OptimizerMethod.cs +++ b/src/SharpLearning.Neural/Optimizers/OptimizerMethod.cs @@ -6,14 +6,14 @@ public enum OptimizerMethod { /// - /// Stochastic gradient descent. + /// Stochastic gradient descent. /// Recommended learning rate: 0.01. /// Sgd, /// - /// Adam (Adaptive Moment Estimation) is another method that computes adaptive learning rates for each parameter. - /// In addition to storing an exponentially decaying average of past squared gradients vtvt like Adadelta, + /// Adam (Adaptive Moment Estimation) is another method that computes adaptive learning rates for each parameter. + /// In addition to storing an exponentially decaying average of past squared gradients vtvt like Adadelta, /// Adam also keeps an exponentially decaying average of past gradients, similar to momentum. /// Essentially Adam is RMSProp with momentum. /// https://arxiv.org/pdf/1412.6980.pdf. @@ -36,7 +36,7 @@ public enum OptimizerMethod Nadam, /// - /// Adagrad adapts the learning rate to each parameter, performing larger updates for infrequent and smaller updates for frequent parameters. + /// Adagrad adapts the learning rate to each parameter, performing larger updates for infrequent and smaller updates for frequent parameters. /// For this reason, it is well-suited for dealing with sparse data: /// https://en.wikipedia.org/wiki/Stochastic_gradient_descent#AdaGrad. /// Recommended learning rate: 0.01. 
@@ -44,8 +44,8 @@ public enum OptimizerMethod Adagrad, /// - /// Adadelta is an extension of Adagrad that seeks to reduce its aggressive, - /// monotonically decreasing learning rate. + /// Adadelta is an extension of Adagrad that seeks to reduce its aggressive, + /// monotonically decreasing learning rate. /// Instead of accumulating all past squared gradients, Adadelta restricts the window of accumulated past gradients to some fixed size w. /// https://arxiv.org/pdf/1212.5701v1.pdf. /// Recommended learning rate: 1.0. @@ -63,5 +63,5 @@ public enum OptimizerMethod /// RMSprop and Adadelta have both been developed independently around the same time stemming from the need to resolve Adagrad's radically diminishing learning rates. /// Recommended learning rate: 0.001. /// - RMSProp + RMSProp, } diff --git a/src/SharpLearning.Neural/ParametersAndGradients.cs b/src/SharpLearning.Neural/ParametersAndGradients.cs index 6aa0aa05..b5ca707e 100644 --- a/src/SharpLearning.Neural/ParametersAndGradients.cs +++ b/src/SharpLearning.Neural/ParametersAndGradients.cs @@ -1,22 +1,22 @@ namespace SharpLearning.Neural; /// -/// +/// /// public class ParametersAndGradients { /// - /// + /// /// public readonly float[] Parameters; /// - /// + /// /// public readonly float[] Gradients; /// - /// + /// /// /// /// diff --git a/src/SharpLearning.Optimization.Test/ArrayAssert.cs b/src/SharpLearning.Optimization.Test/ArrayAssert.cs index dee15d56..df54113a 100644 --- a/src/SharpLearning.Optimization.Test/ArrayAssert.cs +++ b/src/SharpLearning.Optimization.Test/ArrayAssert.cs @@ -4,10 +4,10 @@ namespace SharpLearning.Optimization.Test; public static class ArrayAssert { - const double m_defaultDelta = 0.000001; + const double DefaultDelta = 0.000001; public static void AssertAreEqual(double[] expected, double[] actual, - double delta = m_defaultDelta) + double delta = DefaultDelta) { Assert.AreEqual(expected.Length, actual.Length); for (var i = 0; i < expected.Length; i++) diff --git 
a/src/SharpLearning.Optimization.Test/BayesianOptimizerTest.cs b/src/SharpLearning.Optimization.Test/BayesianOptimizerTest.cs index e37e0487..22b29468 100644 --- a/src/SharpLearning.Optimization.Test/BayesianOptimizerTest.cs +++ b/src/SharpLearning.Optimization.Test/BayesianOptimizerTest.cs @@ -70,19 +70,19 @@ public void BayesianOptimizer_Optimize(int? maxDegreeOfParallelism) var sut = CreateSut(maxDegreeOfParallelism, parameters); var results = sut.Optimize(MinimizeWeightFromHeight); - var actual = new OptimizerResult[] { results.First(), results.Last() }; + var actual = new OptimizerResult[] { results[0], results[^1] }; var expected = new OptimizerResult[] { new([90.513222660177036], 114559.43191955783), - new([41.752538896050559], 779.196560786838) + new([41.752538896050559], 779.196560786838), }; - Assert.AreEqual(expected.First().Error, actual.First().Error, Delta); - Assert.AreEqual(expected.First().ParameterSet.First(), actual.First().ParameterSet.First(), Delta); + Assert.AreEqual(expected[0].Error, actual[0].Error, Delta); + Assert.AreEqual(expected[0].ParameterSet[0], actual[0].ParameterSet[0], Delta); - Assert.AreEqual(expected.Last().Error, actual.Last().Error, Delta); - Assert.AreEqual(expected.Last().ParameterSet.First(), actual.Last().ParameterSet.First(), Delta); + Assert.AreEqual(expected[^1].Error, actual[^1].Error, Delta); + Assert.AreEqual(expected[^1].ParameterSet[0], actual[^1].ParameterSet[0], Delta); } [TestMethod] @@ -149,52 +149,46 @@ public void BayesianOptimizer_OptimizeBest_MultipleParameters_Open_Loop_Using_Pr } [TestMethod] - [ExpectedException(typeof(ArgumentNullException))] public void BayesianOptimizer_ArgumentCheck_ParameterRanges() { - var sut = new BayesianOptimizer(null, 20); + Assert.ThrowsException(() => new BayesianOptimizer(null, 20)); } [TestMethod] - [ExpectedException(typeof(ArgumentException))] public void BayesianOptimizer_ArgumentCheck_Iterations() { - var sut = new BayesianOptimizer(new[] { new GridParameterSpec(0, 
1, 2) }, - 0); + Assert.ThrowsException(() => + new BayesianOptimizer(new[] { new GridParameterSpec(0, 1, 2) }, 0)); } [TestMethod] - [ExpectedException(typeof(ArgumentException))] public void BayesianOptimizer_ArgumentCheck_RandomStartingPointCount() { - var sut = new BayesianOptimizer(new[] { new GridParameterSpec(0, 1, 2) }, - 10, 0); + Assert.ThrowsException(() => + new BayesianOptimizer(new[] { new GridParameterSpec(0, 1, 2) }, 10, 0)); } [TestMethod] - [ExpectedException(typeof(ArgumentException))] public void BayesianOptimizer_ArgumentCheck_FunctionEvaluationsPerIterationCount() { - var sut = new BayesianOptimizer(new[] { new GridParameterSpec(0, 1, 2) }, - 10, 20, 0); + Assert.ThrowsException(() => + new BayesianOptimizer(new[] { new GridParameterSpec(0, 1, 2) }, 10, 20, 0)); } [TestMethod] - [ExpectedException(typeof(ArgumentException))] public void BayesianOptimizer_ArgumentCheck_RandomSearchPointCount() { - var sut = new BayesianOptimizer(new[] { new GridParameterSpec(0, 1, 2) }, - 10, 20, 30, 0); + Assert.ThrowsException(() => + new BayesianOptimizer(new[] { new GridParameterSpec(0, 1, 2) }, 10, 20, 30, 0)); } static BayesianOptimizer CreateSut( int? maybeMaxDegreeOfParallelism, MinMaxParameterSpec[] parameters) { - const int DefaultMaxDegreeOfParallelism = -1; + const int defaultMaxDegreeOfParallelism = -1; - var maxDegreeOfParallelism = maybeMaxDegreeOfParallelism.HasValue ? - maybeMaxDegreeOfParallelism.Value : DefaultMaxDegreeOfParallelism; + var maxDegreeOfParallelism = maybeMaxDegreeOfParallelism ?? 
defaultMaxDegreeOfParallelism; var runParallel = maybeMaxDegreeOfParallelism.HasValue; diff --git a/src/SharpLearning.Optimization.Test/GlobalizedBoundedNelderMeadOptimizerTest.cs b/src/SharpLearning.Optimization.Test/GlobalizedBoundedNelderMeadOptimizerTest.cs index c2a7ecb4..8474cf23 100644 --- a/src/SharpLearning.Optimization.Test/GlobalizedBoundedNelderMeadOptimizerTest.cs +++ b/src/SharpLearning.Optimization.Test/GlobalizedBoundedNelderMeadOptimizerTest.cs @@ -1,5 +1,4 @@ -using System.Linq; -using Microsoft.VisualStudio.TestTools.UnitTesting; +using Microsoft.VisualStudio.TestTools.UnitTesting; using static SharpLearning.Optimization.Test.ObjectiveUtilities; namespace SharpLearning.Optimization.Test; @@ -25,13 +24,12 @@ public void GlobalizedBoundedNelderMeadOptimizer_OptimizeBest(int? maxDegreeOfPa var actual = sut.OptimizeBest(Minimize); - Assert.AreEqual(expected: -0.99999960731425908, actual.Error, Delta); + Assert.AreEqual(expected: -0.99592339271458108, actual.Error, Delta); Assert.AreEqual(expected: 3, actual.ParameterSet.Length); - const double delta = 1e-3; - Assert.AreEqual(expected: -1.5711056814954487, actual.ParameterSet[0], delta); - Assert.AreEqual(expected: -6.283490634742785, actual.ParameterSet[1], delta); - Assert.AreEqual(expected: -2.9822323517533149E-07, actual.ParameterSet[2], delta); + Assert.AreEqual(expected: 7.9170034654971069, actual.ParameterSet[0], Delta); + Assert.AreEqual(expected: -3.1348067994029782, actual.ParameterSet[1], Delta); + Assert.AreEqual(expected: -0.0020768773583485015, actual.ParameterSet[2], Delta); } [TestMethod] @@ -49,35 +47,34 @@ public void GlobalizedBoundedNelderMeadOptimizer_Optimize(int? 
maxDegreeOfParall var sut = CreateSut(maxDegreeOfParallelism, parameters); var results = sut.Optimize(MinimizeWeightFromHeight); - var actual = new OptimizerResult[] { results.First(), results.Last() }; + var actual = new OptimizerResult[] { results[0], results[^1] }; var expected = new OptimizerResult[] { - new([37.71314634450421], 109.3438139631394), - new([37.713142445047254], 109.34381396345546) + new([37.71323726440562], 109.34381430968727), + new([37.713289997817874], 109.34381396345546), }; - Assert.AreEqual(expected.First().Error, actual.First().Error, Delta); - Assert.AreEqual(expected.First().ParameterSet.First(), - actual.First().ParameterSet.First(), Delta); + Assert.AreEqual(expected[0].Error, actual[0].Error, Delta); + Assert.AreEqual(expected[0].ParameterSet[0], + actual[0].ParameterSet[0], Delta); - Assert.AreEqual(expected.Last().Error, actual.Last().Error, Delta); - Assert.AreEqual(expected.Last().ParameterSet.First(), - actual.Last().ParameterSet.First(), Delta); + Assert.AreEqual(expected[^1].Error, actual[^1].Error, Delta); + Assert.AreEqual(expected[^1].ParameterSet[0], + actual[^1].ParameterSet[0], Delta); } static GlobalizedBoundedNelderMeadOptimizer CreateSut( int? maybeMaxDegreeOfParallelism, MinMaxParameterSpec[] parameters) { - const int DefaultMaxDegreeOfParallelism = -1; + const int defaultMaxDegreeOfParallelism = -1; - var maxDegreeOfParallelism = maybeMaxDegreeOfParallelism.HasValue ? - maybeMaxDegreeOfParallelism.Value : DefaultMaxDegreeOfParallelism; + var maxDegreeOfParallelism = maybeMaxDegreeOfParallelism ?? 
defaultMaxDegreeOfParallelism; var sut = new GlobalizedBoundedNelderMeadOptimizer(parameters, maxRestarts: 50, - noImprovementThreshold: 1e-5, + noImprovementThreshold: 1e-1, maxIterationsWithoutImprovement: 10, maxIterationsPrRestart: 0, maxFunctionEvaluations: 0, diff --git a/src/SharpLearning.Optimization.Test/GridSearchOptimizationTest.cs b/src/SharpLearning.Optimization.Test/GridSearchOptimizationTest.cs index 19e80d95..c421a51a 100644 --- a/src/SharpLearning.Optimization.Test/GridSearchOptimizationTest.cs +++ b/src/SharpLearning.Optimization.Test/GridSearchOptimizationTest.cs @@ -1,5 +1,4 @@ using System; -using System.Linq; using Microsoft.VisualStudio.TestTools.UnitTesting; using static SharpLearning.Optimization.Test.ObjectiveUtilities; @@ -51,16 +50,16 @@ public void GridSearchOptimizer_Optimize(int? maxDegreeOfParallelism) var expected = new OptimizerResult[] { new([10], 31638.9579), - new([60], 20500.6279) + new([60], 20500.6279), }; - Assert.AreEqual(expected.First().Error, actual.First().Error, Delta); - Assert.AreEqual(expected.First().ParameterSet.First(), - actual.First().ParameterSet.First(), Delta); + Assert.AreEqual(expected[0].Error, actual[0].Error, Delta); + Assert.AreEqual(expected[0].ParameterSet[0], + actual[0].ParameterSet[0], Delta); - Assert.AreEqual(expected.Last().Error, actual.Last().Error, Delta); - Assert.AreEqual(expected.Last().ParameterSet.First(), - actual.Last().ParameterSet.First(), Delta); + Assert.AreEqual(expected[^1].Error, actual[^1].Error, Delta); + Assert.AreEqual(expected[^1].ParameterSet[0], + actual[^1].ParameterSet[0], Delta); } [TestMethod] diff --git a/src/SharpLearning.Optimization.Test/HyperbandOptimizerTest.cs b/src/SharpLearning.Optimization.Test/HyperbandOptimizerTest.cs index 2afd696c..602082a6 100644 --- a/src/SharpLearning.Optimization.Test/HyperbandOptimizerTest.cs +++ b/src/SharpLearning.Optimization.Test/HyperbandOptimizerTest.cs @@ -48,7 +48,7 @@ public void HyperbandOptimizer_Optimize() }; var random 
= new Random(343); - OptimizerResult minimize(double[] p, double r) + OptimizerResult Minimize(double[] p, double r) { var error = random.NextDouble(); return new OptimizerResult(p, error); @@ -61,7 +61,7 @@ OptimizerResult minimize(double[] p, double r) skipLastIterationOfEachRound: false, seed: 34); - var actual = sut.Optimize(minimize); + var actual = sut.Optimize(Minimize); AssertOptimizerResults(Expected, actual); } diff --git a/src/SharpLearning.Optimization.Test/ParameterBoundsTest.cs b/src/SharpLearning.Optimization.Test/ParameterBoundsTest.cs index 5b9bb5d6..da840494 100644 --- a/src/SharpLearning.Optimization.Test/ParameterBoundsTest.cs +++ b/src/SharpLearning.Optimization.Test/ParameterBoundsTest.cs @@ -32,7 +32,7 @@ public void ParameterBounds_NextValue() 63.8914499915631, 109.294177409864, 188.567149950455, - 33.2731248034505 + 33.2731248034505, }; Assert.AreEqual(expected.Length, actual.Length); diff --git a/src/SharpLearning.Optimization.Test/ParticleSwarmOptimizerTest.cs b/src/SharpLearning.Optimization.Test/ParticleSwarmOptimizerTest.cs index 1b967e72..c7b178bd 100644 --- a/src/SharpLearning.Optimization.Test/ParticleSwarmOptimizerTest.cs +++ b/src/SharpLearning.Optimization.Test/ParticleSwarmOptimizerTest.cs @@ -1,5 +1,4 @@ -using System.Linq; -using Microsoft.VisualStudio.TestTools.UnitTesting; +using Microsoft.VisualStudio.TestTools.UnitTesting; using static SharpLearning.Optimization.Test.ObjectiveUtilities; namespace SharpLearning.Optimization.Test; @@ -49,7 +48,7 @@ public void ParticleSwarmOptimizer_Optimize(int? maxDegreeOfParallelism) var results = sut.Optimize(MinimizeWeightFromHeight); - var actual = new OptimizerResult[] { results.First(), results.Last() }; + var actual = new OptimizerResult[] { results[0], results[^1] }; var expected = new OptimizerResult[] { @@ -57,23 +56,22 @@ public void ParticleSwarmOptimizer_Optimize(int? 
maxDegreeOfParallelism) new([37.2514904205637], 118.093289672808), }; - Assert.AreEqual(expected.First().Error, actual.First().Error, Delta); - Assert.AreEqual(expected.First().ParameterSet.First(), - actual.First().ParameterSet.First(), Delta); + Assert.AreEqual(expected[0].Error, actual[0].Error, Delta); + Assert.AreEqual(expected[0].ParameterSet[0], + actual[0].ParameterSet[0], Delta); - Assert.AreEqual(expected.Last().Error, actual.Last().Error, Delta); - Assert.AreEqual(expected.Last().ParameterSet.First(), - actual.Last().ParameterSet.First(), Delta); + Assert.AreEqual(expected[^1].Error, actual[^1].Error, Delta); + Assert.AreEqual(expected[^1].ParameterSet[0], + actual[^1].ParameterSet[0], Delta); } static ParticleSwarmOptimizer CreateSut( int? maybeMaxDegreeOfParallelism, MinMaxParameterSpec[] parameters) { - const int DefaultMaxDegreeOfParallelism = -1; + const int defaultMaxDegreeOfParallelism = -1; - var maxDegreeOfParallelism = maybeMaxDegreeOfParallelism.HasValue ? - maybeMaxDegreeOfParallelism.Value : DefaultMaxDegreeOfParallelism; + var maxDegreeOfParallelism = maybeMaxDegreeOfParallelism ?? defaultMaxDegreeOfParallelism; var sut = new ParticleSwarmOptimizer(parameters, maxIterations: 100, diff --git a/src/SharpLearning.Optimization.Test/RandomSearchOptimizationTest.cs b/src/SharpLearning.Optimization.Test/RandomSearchOptimizationTest.cs index 70e70ff7..f4485529 100644 --- a/src/SharpLearning.Optimization.Test/RandomSearchOptimizationTest.cs +++ b/src/SharpLearning.Optimization.Test/RandomSearchOptimizationTest.cs @@ -54,13 +54,13 @@ public void RandomSearchOptimizer_Optimize(int? 
maxDegreeOfParallelism) new([19.1529422843144], 14251.396910816733), }; - Assert.AreEqual(expected.First().Error, actual.First().Error, Delta); - Assert.AreEqual(expected.First().ParameterSet.First(), - actual.First().ParameterSet.First(), Delta); + Assert.AreEqual(expected[0].Error, actual[0].Error, Delta); + Assert.AreEqual(expected[0].ParameterSet[0], + actual[0].ParameterSet[0], Delta); - Assert.AreEqual(expected.Last().Error, actual.Last().Error, Delta); - Assert.AreEqual(expected.Last().ParameterSet.First(), - actual.Last().ParameterSet.First(), Delta); + Assert.AreEqual(expected[^1].Error, actual[^1].Error, Delta); + Assert.AreEqual(expected[^1].ParameterSet[0], + actual[^1].ParameterSet[0], Delta); } [TestMethod] diff --git a/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs b/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs index bcc71eab..626c5b43 100644 --- a/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs +++ b/src/SharpLearning.Optimization.Test/SmacOptimizerTest.cs @@ -65,11 +65,11 @@ public void SmacOptimizer_Optimize() new([41.8333740634068], 806.274612132759), }; - Assert.AreEqual(expected.First().Error, actual.First().Error, Delta); - Assert.AreEqual(expected.First().ParameterSet.First(), actual.First().ParameterSet.First(), Delta); + Assert.AreEqual(expected[0].Error, actual[0].Error, Delta); + Assert.AreEqual(expected[0].ParameterSet[0], actual[0].ParameterSet[0], Delta); - Assert.AreEqual(expected.Last().Error, actual.Last().Error, Delta); - Assert.AreEqual(expected.Last().ParameterSet.First(), actual.Last().ParameterSet.First(), Delta); + Assert.AreEqual(expected[^1].Error, actual[^1].Error, Delta); + Assert.AreEqual(expected[^1].ParameterSet[0], actual[^1].ParameterSet[0], Delta); } [TestMethod] diff --git a/src/SharpLearning.Optimization.Test/Transforms/Log10TransformTest.cs b/src/SharpLearning.Optimization.Test/Transforms/Log10TransformTest.cs index 2a19814b..fd9702c1 100644 --- 
a/src/SharpLearning.Optimization.Test/Transforms/Log10TransformTest.cs +++ b/src/SharpLearning.Optimization.Test/Transforms/Log10TransformTest.cs @@ -21,7 +21,7 @@ public void Log10Transform_Transform() parameterType: ParameterType.Continuous, sampler: sampler); } - var expected = new double[] { 0.00596229274859676, 0.000671250295495889, 0.000348781578382963, 0.00357552550811494, 0.0411440752926137, 0.012429636665806, 0.000944855847942692, 0.00964528475124291, 0.557104498829374, 0.000197223348905772, }; + var expected = new double[] { 0.00596229274859676, 0.000671250295495889, 0.000348781578382963, 0.00357552550811494, 0.0411440752926137, 0.012429636665806, 0.000944855847942692, 0.00964528475124291, 0.557104498829374, 0.000197223348905772 }; ArrayAssert.AssertAreEqual(expected, actual); } diff --git a/src/SharpLearning.Optimization/AcquisitionFunctions.cs b/src/SharpLearning.Optimization/AcquisitionFunctions.cs index d3d8cb42..bc9b4fa3 100644 --- a/src/SharpLearning.Optimization/AcquisitionFunctions.cs +++ b/src/SharpLearning.Optimization/AcquisitionFunctions.cs @@ -18,7 +18,7 @@ namespace SharpLearning.Optimization; public static class AcquisitionFunctions { /// - /// + /// /// /// Current best score. /// Predicted score. @@ -28,7 +28,10 @@ public static class AcquisitionFunctions public static double ExpectedImprovement(double currentScore, double mean, double variance, double xi = 0.0) { // in case of zero variance return 0.0. - if (variance == 0.0) return 0.0; + if (variance == 0.0) + { + return 0.0; + } var std = Math.Sqrt(variance); var z = (currentScore - mean - xi) / std; @@ -38,7 +41,7 @@ public static double ExpectedImprovement(double currentScore, double mean, doubl } /// - /// + /// /// /// Current best score. /// Predicted score. 
@@ -48,7 +51,10 @@ public static double ExpectedImprovement(double currentScore, double mean, doubl public static double ProbabilityOfImprovement(double currentScore, double mean, double variance, double xi = 0.0) { // in case of zero variance return 0.0. - if (variance == 0.0) return 0.0; + if (variance == 0.0) + { + return 0.0; + } var std = Math.Sqrt((double)variance); var z = (currentScore - mean - xi) / std; @@ -75,7 +81,10 @@ static double CumulativeDensityFunction(double x) // Save the sign of x var sign = 1; if (x < 0) + { sign = -1; + } + x = Math.Abs(x) / Math.Sqrt(2.0); // A&S formula 7.1.26 diff --git a/src/SharpLearning.Optimization/BayesianOptimizer.cs b/src/SharpLearning.Optimization/BayesianOptimizer.cs index 2159684e..d9fe8dd8 100644 --- a/src/SharpLearning.Optimization/BayesianOptimizer.cs +++ b/src/SharpLearning.Optimization/BayesianOptimizer.cs @@ -14,7 +14,7 @@ namespace SharpLearning.Optimization; /// /// Bayesian optimization (BO) for global black box optimization problems. BO learns a model based on the initial parameter sets and scores. /// This model is used to sample new promising parameter candidates which are evaluated and added to the existing parameter sets. -/// This process iterates several times. The method is computational expensive so is most relevant for expensive problems, +/// This process iterates several times. The method is computational expensive so is most relevant for expensive problems, /// where each evaluation of the function to minimize takes a long time, like hyper parameter tuning a machine learning method. /// But in that case it can usually reduce the number of iterations required to reach a good solution compared to less sophisticated methods. /// Implementation loosely based on: @@ -43,7 +43,7 @@ public sealed class BayesianOptimizer : IOptimizer /// /// Bayesian optimization (BO) for global black box optimization problems. BO learns a model based on the initial parameter sets and scores. 
/// This model is used to sample new promising parameter candidates which are evaluated and added to the existing parameter sets. - /// This process iterates several times. The method is computational expensive so is most relevant for expensive problems, + /// This process iterates several times. The method is computational expensive so is most relevant for expensive problems, /// where each evaluation of the function to minimize takes a long time, like hyper parameter tuning a machine learning method. /// But in that case it can usually reduce the number of iterations required to reach a good solution compared to less sophisticated methods. /// Implementation loosely based on: @@ -54,9 +54,9 @@ public sealed class BayesianOptimizer : IOptimizer /// A list of parameter specs, one for each optimization parameter /// The number of iterations to perform. /// Iteration * functionEvaluationsPerIteration = totalFunctionEvaluations - /// Number of randomly parameter sets used + /// Number of randomly parameter sets used /// for initialization (default is 20) - /// The number of function evaluations per iteration. + /// The number of function evaluations per iteration. /// The parameter sets are included in order of most promising outcome (default is 1) /// The number of random parameter sets /// used when maximizing the expected improvement acquisition function (default is 1000) @@ -76,14 +76,29 @@ public BayesianOptimizer(IParameterSpec[] parameters, { m_parameters = parameters ?? throw new ArgumentNullException(nameof(parameters)); - if (iterations < 1) throw new ArgumentException(nameof(iterations) + + if (iterations < 1) + { + throw new ArgumentException(nameof(iterations) + "must be at least 1. Was: " + iterations); - if (randomStartingPointCount < 1) throw new ArgumentException(nameof(randomStartingPointCount) + + } + + if (randomStartingPointCount < 1) + { + throw new ArgumentException(nameof(randomStartingPointCount) + "must be at least 1. 
Was: " + randomStartingPointCount); - if (functionEvaluationsPerIterationCount < 1) throw new ArgumentException(nameof(functionEvaluationsPerIterationCount) + + } + + if (functionEvaluationsPerIterationCount < 1) + { + throw new ArgumentException(nameof(functionEvaluationsPerIterationCount) + "must be at least 1. Was: " + functionEvaluationsPerIterationCount); - if (randomSearchPointCount < 1) throw new ArgumentException(nameof(randomSearchPointCount) + + } + + if (randomSearchPointCount < 1) + { + throw new ArgumentException(nameof(randomSearchPointCount) + "must be at least 1. Was: " + randomSearchPointCount); + } m_random = new Random(seed); // Use member to seed the random uniform sampler. @@ -181,14 +196,14 @@ public List RunParameterSets(Func fu /// Propose a new list of parameter sets. /// /// The number of parameter sets to propose - /// Results from previous runs. + /// Results from previous runs. /// These are used in the model for proposing new parameter sets. /// If no results are provided, random parameter sets will be returned. /// public double[][] ProposeParameterSets(int parameterSetCount, IReadOnlyList previousResults = null) { - var previousParameterSetCount = previousResults == null ? 0 : previousResults.Count; + var previousParameterSetCount = (previousResults?.Count) ?? 0; if (previousParameterSetCount < m_randomStartingPointsCount) { var randomParameterSetCount = Math.Min(parameterSetCount, diff --git a/src/SharpLearning.Optimization/GlobalizedBoundedNelderMeadOptimizer.cs b/src/SharpLearning.Optimization/GlobalizedBoundedNelderMeadOptimizer.cs index 918657e1..9ca93ede 100644 --- a/src/SharpLearning.Optimization/GlobalizedBoundedNelderMeadOptimizer.cs +++ b/src/SharpLearning.Optimization/GlobalizedBoundedNelderMeadOptimizer.cs @@ -9,8 +9,8 @@ namespace SharpLearning.Optimization; /// -/// Globalized bounded Nelder-Mead method. This version of Nelder-Mead optimization -/// avoids some of the shortcomings the standard implementation. 
+/// Globalized bounded Nelder-Mead method. This version of Nelder-Mead optimization +/// avoids some of the shortcomings the standard implementation. /// Specifically it is better suited for multi-modal optimization problems through its restart property. /// It also respect the bounds given by the provided parameter space. /// Roughly based on: @@ -36,8 +36,8 @@ public sealed class GlobalizedBoundedNelderMeadOptimizer : IOptimizer readonly int m_maxDegreeOfParallelism = -1; /// - /// Globalized bounded Nelder-Mead method. This version of Nelder-Mead optimization - /// avoids some of the shortcomings the standard implementation. + /// Globalized bounded Nelder-Mead method. This version of Nelder-Mead optimization + /// avoids some of the shortcomings the standard implementation. /// Specifically it is better suited for multi-modal optimization problems through its restart property. /// It also respect the bounds given by the provided parameter space. /// Roughly based on: @@ -102,7 +102,7 @@ public OptimizerResult OptimizeBest(Func functionToMi /// /// Optimization using Globalized bounded Nelder-Mead method. - /// Returns all results, chronologically ordered. + /// Returns all results, chronologically ordered. /// Note that the order of results might be affected if running parallel. 
/// /// @@ -134,13 +134,13 @@ public OptimizerResult[] Optimize(Func functionToMini var q = a * (Math.Sqrt(dim + 1) - 1) / (dim * Math.Sqrt(2)); var x = initialPoint.ToArray(); - x[i] = x[i] + p; + x[i] += p; for (var j = 0; j < dim; j++) { if (j != i) { - x[j] = x[j] + q; + x[j] += q; } } @@ -158,7 +158,7 @@ public OptimizerResult[] Optimize(Func functionToMini while (true) { results = results.OrderBy(r => r.Error).ToList(); - var best = results.First(); + var best = results[0]; // break after m_maxIterationsPrRestart if (iterations >= m_maxIterationsPrRestart && m_maxIterationsPrRestart != 0) @@ -213,7 +213,7 @@ public OptimizerResult[] Optimize(Func functionToMini BoundCheck(xr); var refelctionScore = EvaluateFunction(functionToMinimize, xr); - var first = results.First().Error; + var first = results[0].Error; if (first <= refelctionScore.Error && refelctionScore.Error < results[results.Count - 2].Error) { results.RemoveAt(results.Count - 1); @@ -298,7 +298,7 @@ void BoundCheck(double[] parameters) } /// - /// + /// /// /// void RandomRestartPoint(double[] newPoint) diff --git a/src/SharpLearning.Optimization/GridSearchOptimizer.cs b/src/SharpLearning.Optimization/GridSearchOptimizer.cs index 376e7d28..532d3da0 100644 --- a/src/SharpLearning.Optimization/GridSearchOptimizer.cs +++ b/src/SharpLearning.Optimization/GridSearchOptimizer.cs @@ -16,7 +16,7 @@ public sealed class GridSearchOptimizer : IOptimizer readonly ParallelOptions m_parallelOptions; /// - /// + /// /// /// A list of parameter specs, one for each optimization parameter /// Use multi threading to speed up execution (default is true) @@ -45,7 +45,7 @@ public OptimizerResult OptimizeBest(Func functionToMi /// /// Simple grid search that tries all combinations of the provided parameters. - /// Returns all results, chronologically ordered. + /// Returns all results, chronologically ordered. /// Note that the order of results might be affected if running parallel. 
/// /// @@ -88,7 +88,7 @@ static double[][] CartesianProduct(IParameterSpec[] sequences) static IEnumerable> CartesianProductEnumerable(IEnumerable> sequences) { - IEnumerable> emptyProduct = new[] { Enumerable.Empty() }; + IEnumerable> emptyProduct = [[]]; return sequences.Aggregate( emptyProduct, (accumulator, sequence) => diff --git a/src/SharpLearning.Optimization/HyperbandOptimizer.cs b/src/SharpLearning.Optimization/HyperbandOptimizer.cs index 1a9b652d..c54c0f1c 100644 --- a/src/SharpLearning.Optimization/HyperbandOptimizer.cs +++ b/src/SharpLearning.Optimization/HyperbandOptimizer.cs @@ -19,10 +19,10 @@ namespace SharpLearning.Optimization; /// https://arxiv.org/pdf/1603.06560.pdf /// Implementation based on: /// https://github.com/zygmuntz/hyperband -/// -/// Hyperband controls a budget of compute for each set of hyperparameters, -/// Initially it will run each parameter set with very little compute budget to get a taste of how they perform. -/// Then it takes the best performers and runs them on a larger budget. +/// +/// Hyperband controls a budget of compute for each set of hyperparameters, +/// Initially it will run each parameter set with very little compute budget to get a taste of how they perform. +/// Then it takes the best performers and runs them on a larger budget. /// public sealed class HyperbandOptimizer { @@ -39,19 +39,19 @@ public sealed class HyperbandOptimizer /// /// Hyperband optimizer based on: https://arxiv.org/pdf/1603.06560.pdf - /// - /// Hyperband controls a budget of compute for each set of hyperparameters, - /// Initially it will run each parameter set with very little compute budget to get a taste of how they perform. - /// Then it takes the best performers and runs them on a larger budget. + /// + /// Hyperband controls a budget of compute for each set of hyperparameters, + /// Initially it will run each parameter set with very little compute budget to get a taste of how they perform. 
+ /// Then it takes the best performers and runs them on a larger budget. /// /// A list of parameter specs, one for each optimization parameter /// This provides the maximum budget. - /// One unit of compute could be 5 epochs over a dataset for instance. Consequently, - /// a unit of compute should be chosen to be the minimum amount of computation where different + /// One unit of compute could be 5 epochs over a dataset for instance. Consequently, + /// a unit of compute should be chosen to be the minimum amount of computation where different /// hyperparameter configurations start to separate (or where it is clear that some settings diverge)> /// Controls the proportion of configurations discarded in each round. /// Together with maximumUnitsOfCompute, it dictates how many rounds are considered - /// True to skip the last, + /// True to skip the last, /// most computationally expensive, iteration of each round. Default is false. /// public HyperbandOptimizer(IParameterSpec[] parameters, @@ -60,8 +60,16 @@ public HyperbandOptimizer(IParameterSpec[] parameters, int seed = 34) { m_parameters = parameters ?? throw new ArgumentNullException(nameof(parameters)); - if (maximumBudget < 1) throw new ArgumentException(nameof(maximumBudget) + " must be at larger than 0"); - if (eta < 1) throw new ArgumentException(nameof(eta) + " must be at larger than 0"); + if (maximumBudget < 1) + { + throw new ArgumentException(nameof(maximumBudget) + " must be at larger than 0"); + } + + if (eta < 1) + { + throw new ArgumentException(nameof(eta) + " must be at larger than 0"); + } + m_sampler = new RandomUniform(seed); // This is called R in the paper. 
diff --git a/src/SharpLearning.Optimization/IOptimizer.cs b/src/SharpLearning.Optimization/IOptimizer.cs index 3de27478..c397550e 100644 --- a/src/SharpLearning.Optimization/IOptimizer.cs +++ b/src/SharpLearning.Optimization/IOptimizer.cs @@ -3,7 +3,7 @@ namespace SharpLearning.Optimization; /// -/// +/// /// public interface IOptimizer { @@ -15,7 +15,7 @@ public interface IOptimizer OptimizerResult OptimizeBest(Func functionToMinimize); /// - /// Returns all results ordered from best to worst (minimized). + /// Returns all results ordered from best to worst (minimized). /// /// /// diff --git a/src/SharpLearning.Optimization/OptimizerResult.cs b/src/SharpLearning.Optimization/OptimizerResult.cs index 9eb780ed..19d750a3 100644 --- a/src/SharpLearning.Optimization/OptimizerResult.cs +++ b/src/SharpLearning.Optimization/OptimizerResult.cs @@ -8,11 +8,6 @@ namespace SharpLearning.Optimization; [Serializable] public sealed class OptimizerResult { - /// - /// - /// - /// - /// public OptimizerResult(double[] parameterSet, double error) { ParameterSet = parameterSet ?? throw new ArgumentNullException(nameof(parameterSet)); diff --git a/src/SharpLearning.Optimization/ParameterSamplers/IParameterSampler.cs b/src/SharpLearning.Optimization/ParameterSamplers/IParameterSampler.cs index e3df07d7..24e40f76 100644 --- a/src/SharpLearning.Optimization/ParameterSamplers/IParameterSampler.cs +++ b/src/SharpLearning.Optimization/ParameterSamplers/IParameterSampler.cs @@ -1,7 +1,7 @@ namespace SharpLearning.Optimization.ParameterSamplers; /// -/// Defines the interface for a parameter samplers. +/// Defines the interface for a parameter samplers. 
/// public interface IParameterSampler { diff --git a/src/SharpLearning.Optimization/ParameterSamplers/RandomUniform.cs b/src/SharpLearning.Optimization/ParameterSamplers/RandomUniform.cs index cb7c10fb..5246085d 100644 --- a/src/SharpLearning.Optimization/ParameterSamplers/RandomUniform.cs +++ b/src/SharpLearning.Optimization/ParameterSamplers/RandomUniform.cs @@ -3,14 +3,14 @@ namespace SharpLearning.Optimization.ParameterSamplers; /// -/// Sample values random uniformly between min and max. +/// Sample values random uniformly between min and max. /// public class RandomUniform : IParameterSampler { readonly Random m_random; /// - /// Sample values random uniformly between min and max. + /// Sample values random uniformly between min and max. /// /// public RandomUniform(int seed = 343) diff --git a/src/SharpLearning.Optimization/ParameterSpecs/GridParameterSpec.cs b/src/SharpLearning.Optimization/ParameterSpecs/GridParameterSpec.cs index 586d14e5..8613e807 100644 --- a/src/SharpLearning.Optimization/ParameterSpecs/GridParameterSpec.cs +++ b/src/SharpLearning.Optimization/ParameterSpecs/GridParameterSpec.cs @@ -14,7 +14,7 @@ public sealed class GridParameterSpec : IParameterSpec readonly int m_minIndex; readonly int m_maxIndex; - const ParameterType m_parameterType = ParameterType.Discrete; + const ParameterType ParameterTypeDiscrete = ParameterType.Discrete; /// /// GridParameterSpec, usable when a fixed set of parameters, @@ -46,7 +46,7 @@ public GridParameterSpec(params double[] parameters) public double SampleValue(IParameterSampler sampler) { // sample random parameter index. - var index = (int)sampler.Sample(m_minIndex, m_maxIndex, m_parameterType); + var index = (int)sampler.Sample(m_minIndex, m_maxIndex, ParameterTypeDiscrete); // return the values of the index. 
return m_parameters[index]; } diff --git a/src/SharpLearning.Optimization/ParticleSwarmOptimizer.cs b/src/SharpLearning.Optimization/ParticleSwarmOptimizer.cs index 9451fa80..c235e9b1 100644 --- a/src/SharpLearning.Optimization/ParticleSwarmOptimizer.cs +++ b/src/SharpLearning.Optimization/ParticleSwarmOptimizer.cs @@ -9,8 +9,8 @@ namespace SharpLearning.Optimization; /// /// Particle Swarm optimizer (PSO). PSO is initialized with a group of random particles -/// and then searches for optima by updating generations. In every iteration, each particle is updated by following two "best" values. -/// The first one is the best solution found by the specific particle so far. +/// and then searches for optima by updating generations. In every iteration, each particle is updated by following two "best" values. +/// The first one is the best solution found by the specific particle so far. /// The other "best" value is the global best value obtained by any particle in the population so far. /// http://www.swarmintelligence.org/tutorials.php /// https://en.wikipedia.org/wiki/Particle_swarm_optimization @@ -29,8 +29,8 @@ public sealed class ParticleSwarmOptimizer : IOptimizer /// /// Particle Swarm optimizer (PSO). PSO is initialized with a group of random particles - /// and then searches for optima by updating generations. In every iteration, each particle is updated by following two "best" values. - /// The first one is the best solution found by the specific particle so far. + /// and then searches for optima by updating generations. In every iteration, each particle is updated by following two "best" values. + /// The first one is the best solution found by the specific particle so far. /// The other "best" value is the global best value obtained by any particle in the population so far. 
/// /// A list of parameter specs, one for each optimization parameter @@ -156,7 +156,6 @@ public OptimizerResult[] Optimize(Func functionToMini //present[] = persent[] + v[] particles[i] = particles[i].Add(particleVelocities[i]); BoundCheck(particles[i], maxParameters, minParameters); - } } diff --git a/src/SharpLearning.Optimization/RandomSearchOptimizer.cs b/src/SharpLearning.Optimization/RandomSearchOptimizer.cs index 794c1dee..855d66c9 100644 --- a/src/SharpLearning.Optimization/RandomSearchOptimizer.cs +++ b/src/SharpLearning.Optimization/RandomSearchOptimizer.cs @@ -22,7 +22,7 @@ public sealed class RandomSearchOptimizer : IOptimizer /// Random search optimizer initializes random parameters between min and max of the provided parameters. /// Roughly based on: http://www.jmlr.org/papers/volume13/bergstra12a/bergstra12a.pdf /// - /// A list of parameter specs, one for each optimization parameter + /// A list of parameter specs, one for each optimization parameter /// The number of iterations to perform /// /// Use multi threading to speed up execution (default is true) @@ -55,7 +55,7 @@ public OptimizerResult OptimizeBest(Func functionToMi /// /// Random search optimizer initializes random parameters between min and max of the provided bounds. - /// Returns all results, chronologically ordered. + /// Returns all results, chronologically ordered. /// Note that the order of results might be affected if running parallel. /// /// diff --git a/src/SharpLearning.Optimization/SmacOptimizer.cs b/src/SharpLearning.Optimization/SmacOptimizer.cs index 55ec7d8c..d2ff2d68 100644 --- a/src/SharpLearning.Optimization/SmacOptimizer.cs +++ b/src/SharpLearning.Optimization/SmacOptimizer.cs @@ -44,11 +44,11 @@ public class SmacOptimizer : IOptimizer /// A list of parameter specs, one for each optimization parameter /// The number of iterations to perform. 
/// Iteration * functionEvaluationsPerIteration = totalFunctionEvaluations - /// Number of randomly parameter sets used + /// Number of randomly parameter sets used /// for initialization (default is 20) - /// The number of function evaluations per iteration. + /// The number of function evaluations per iteration. /// The parameter sets are included in order of most promising outcome (default is 1) - /// The number of top contenders + /// The number of top contenders /// to use in the greedy local search (default is (10) /// The number of random parameter sets /// used when maximizing the expected improvement acquisition function (default is 1000) @@ -65,16 +65,35 @@ public SmacOptimizer(IParameterSpec[] parameters, { m_parameters = parameters ?? throw new ArgumentNullException(nameof(parameters)); - if (iterations < 1) throw new ArgumentException(nameof(iterations) + + if (iterations < 1) + { + throw new ArgumentException(nameof(iterations) + "must be at least 1. Was: " + iterations); - if (randomStartingPointCount < 1) throw new ArgumentException(nameof(randomStartingPointCount) + + } + + if (randomStartingPointCount < 1) + { + throw new ArgumentException(nameof(randomStartingPointCount) + "must be at least 1. Was: " + randomStartingPointCount); - if (functionEvaluationsPerIterationCount < 1) throw new ArgumentException(nameof(functionEvaluationsPerIterationCount) + + } + + if (functionEvaluationsPerIterationCount < 1) + { + throw new ArgumentException(nameof(functionEvaluationsPerIterationCount) + "must be at least 1. Was: " + functionEvaluationsPerIterationCount); - if (localSearchPointCount < 1) throw new ArgumentException(nameof(localSearchPointCount) + + } + + if (localSearchPointCount < 1) + { + throw new ArgumentException(nameof(localSearchPointCount) + "must be at least 1. 
Was: " + localSearchPointCount); - if (randomSearchPointCount < 1) throw new ArgumentException(nameof(randomSearchPointCount) + + } + + if (randomSearchPointCount < 1) + { + throw new ArgumentException(nameof(randomSearchPointCount) + "must be at least 1. Was: " + randomSearchPointCount); + } m_random = new Random(seed); // Use member to seed the random uniform sampler. @@ -157,14 +176,14 @@ public static List RunParameterSets(Func /// The number of parameter sets to propose - /// Results from previous runs. + /// Results from previous runs. /// These are used in the model for proposing new parameter sets. /// If no results are provided, random parameter sets will be returned. /// public double[][] ProposeParameterSets(int parameterSetCount, IReadOnlyList previousResults = null) { - var previousParameterSetCount = previousResults == null ? 0 : previousResults.Count; + var previousParameterSetCount = (previousResults?.Count) ?? 0; if (previousParameterSetCount < m_randomStartingPointsCount) { var randomParameterSetCount = Math.Min(parameterSetCount, @@ -264,7 +283,7 @@ double[][] GreedyPlusRandomSearch(double[][] parentParameterSets, RegressionFore (double[] parameterSet, double expectedImprovement) LocalSearch(double[][] parentParameterSets, RegressionForestModel model, double bestScore, double epsilon) { - var bestParameterSet = parentParameterSets.First(); + var bestParameterSet = parentParameterSets[0]; var bestExpectedImprovement = ComputeExpectedImprovement(bestScore, bestParameterSet, model); // Continue search until no improvement is found. diff --git a/src/SharpLearning.Optimization/Transform.cs b/src/SharpLearning.Optimization/Transform.cs index 7a1db332..93df4e80 100644 --- a/src/SharpLearning.Optimization/Transform.cs +++ b/src/SharpLearning.Optimization/Transform.cs @@ -18,5 +18,5 @@ public enum Transform /// /// ExponentialAverage scale. For ranges close to one, like min: 0.9 and max: 0.999. 
/// - ExponentialAverage + ExponentialAverage, } diff --git a/src/SharpLearning.Optimization/Transforms/ExponentialAverageTransform.cs b/src/SharpLearning.Optimization/Transforms/ExponentialAverageTransform.cs index ac687841..03f2722f 100644 --- a/src/SharpLearning.Optimization/Transforms/ExponentialAverageTransform.cs +++ b/src/SharpLearning.Optimization/Transforms/ExponentialAverageTransform.cs @@ -15,8 +15,8 @@ public class ExponentialAverageTransform : ITransform /// /// /// - /// /// Selects the type of parameter. Should the parameter be sampled as discrete values, or as continous values. + /// /// public double Transform(double min, double max, ParameterType parameterType, IParameterSampler sampler) { diff --git a/src/SharpLearning.Optimization/Transforms/ITransform.cs b/src/SharpLearning.Optimization/Transforms/ITransform.cs index da7413db..cd655262 100644 --- a/src/SharpLearning.Optimization/Transforms/ITransform.cs +++ b/src/SharpLearning.Optimization/Transforms/ITransform.cs @@ -3,7 +3,7 @@ namespace SharpLearning.Optimization.Transforms; /// -/// Interface for transforms. +/// Interface for transforms. /// public interface ITransform { @@ -12,8 +12,8 @@ public interface ITransform /// /// /// - /// /// Selects the type of parameter. Should the parameter be sampled as discrete values, or as continous values. + /// /// double Transform(double min, double max, ParameterType parameterType, IParameterSampler sampler); } diff --git a/src/SharpLearning.Optimization/Transforms/LinearTransform.cs b/src/SharpLearning.Optimization/Transforms/LinearTransform.cs index ab67fcfa..766dab87 100644 --- a/src/SharpLearning.Optimization/Transforms/LinearTransform.cs +++ b/src/SharpLearning.Optimization/Transforms/LinearTransform.cs @@ -13,8 +13,8 @@ public class LinearTransform : ITransform /// /// /// - /// /// Selects the type of parameter. Should the parameter be sampled as discrete values, or as continous values. 
+ /// /// public double Transform(double min, double max, ParameterType parameterType, IParameterSampler sampler) { diff --git a/src/SharpLearning.Optimization/Transforms/Log10Transform.cs b/src/SharpLearning.Optimization/Transforms/Log10Transform.cs index fed83c13..78fe4788 100644 --- a/src/SharpLearning.Optimization/Transforms/Log10Transform.cs +++ b/src/SharpLearning.Optimization/Transforms/Log10Transform.cs @@ -13,8 +13,8 @@ public class Log10Transform : ITransform /// /// /// - /// /// Selects the type of parameter. Should the parameter be sampled as discrete values, or as continous values. + /// /// public double Transform(double min, double max, ParameterType parameterType, IParameterSampler sampler) { diff --git a/src/SharpLearning.RandomForest.Test/DataSetUtilities.cs b/src/SharpLearning.RandomForest.Test/DataSetUtilities.cs index c6624667..bf95f6e7 100644 --- a/src/SharpLearning.RandomForest.Test/DataSetUtilities.cs +++ b/src/SharpLearning.RandomForest.Test/DataSetUtilities.cs @@ -478,5 +478,4 @@ public static (F64Matrix observations, double[] targets) LoadDecisionTreeDataSet 1.52065;14.36;0;2.02;73.42;0;8.44;1.64;0;7 1.51651;14.38;0;1.94;73.61;0;8.48;1.57;0;7 1.51711;14.23;0;2.08;73.36;0;8.62;1.67;0;7"; - } diff --git a/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs b/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs index 5285b89c..c93329fc 100644 --- a/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs +++ b/src/SharpLearning.RandomForest.Test/Models/ClassificationForestModelTest.cs @@ -98,7 +98,7 @@ public void ClassificationForestModel_PredictProbability_Single() Assert.AreEqual(0.076923076923076927, error, m_delta); - var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.650149027443145 }, { 1, 0.349850972556855 }, }), new(0, new Dictionary { { 0, 0.566943847818848 }, { 1, 0.433056152181152 }, }), new(0, new Dictionary { { 0, 0.726936489980608 }, { 
1, 0.273063510019392 }, }), new(0, new Dictionary { { 0, 0.752781908451026 }, { 1, 0.247218091548974 }, }), new(0, new Dictionary { { 0, 0.566943847818848 }, { 1, 0.433056152181152 }, }), new(0, new Dictionary { { 0, 0.792506836300954 }, { 1, 0.207493163699046 }, }), new(1, new Dictionary { { 0, 0.491736055611056 }, { 1, 0.508263944388944 }, }), new(0, new Dictionary { { 0, 0.574583315377433 }, { 1, 0.425416684622567 }, }), new(0, new Dictionary { { 0, 0.838724674018791 }, { 1, 0.161275325981208 }, }), new(1, new Dictionary { { 0, 0.241480824730825 }, { 1, 0.758519175269175 }, }), new(1, new Dictionary { { 0, 0.385258186258186 }, { 1, 0.614741813741813 }, }), new(0, new Dictionary { { 0, 0.726936489980608 }, { 1, 0.273063510019392 }, }), new(0, new Dictionary { { 0, 0.706733044733045 }, { 1, 0.293266955266955 }, }), new(0, new Dictionary { { 0, 0.801266011766012 }, { 1, 0.198733988233988 }, }), new(1, new Dictionary { { 0, 0.294952297702298 }, { 1, 0.705047702297702 }, }), new(0, new Dictionary { { 0, 0.821706914001031 }, { 1, 0.178293085998968 }, }), new(0, new Dictionary { { 0, 0.780062391856509 }, { 1, 0.21993760814349 }, }), new(0, new Dictionary { { 0, 0.554444388944389 }, { 1, 0.445555611055611 }, }), new(1, new Dictionary { { 0, 0.261349872349872 }, { 1, 0.738650127650127 }, }), new(1, new Dictionary { { 0, 0.419758186258186 }, { 1, 0.580241813741813 }, }), new(0, new Dictionary { { 0, 0.71382231249143 }, { 1, 0.28617768750857 }, }), new(1, new Dictionary { { 0, 0.241480824730825 }, { 1, 0.758519175269175 }, }), new(1, new Dictionary { { 0, 0.47562148962149 }, { 1, 0.52437851037851 }, }), new(0, new Dictionary { { 0, 0.821706914001031 }, { 1, 0.178293085998968 }, }), new(0, new Dictionary { { 0, 0.792506836300954 }, { 1, 0.207493163699046 }, }), new(0, new Dictionary { { 0, 0.666244987039105 }, { 1, 0.333755012960895 }, }) }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.650149027443145 }, { 1, 0.349850972556855 } }), new(0, 
new Dictionary { { 0, 0.566943847818848 }, { 1, 0.433056152181152 } }), new(0, new Dictionary { { 0, 0.726936489980608 }, { 1, 0.273063510019392 } }), new(0, new Dictionary { { 0, 0.752781908451026 }, { 1, 0.247218091548974 } }), new(0, new Dictionary { { 0, 0.566943847818848 }, { 1, 0.433056152181152 } }), new(0, new Dictionary { { 0, 0.792506836300954 }, { 1, 0.207493163699046 } }), new(1, new Dictionary { { 0, 0.491736055611056 }, { 1, 0.508263944388944 } }), new(0, new Dictionary { { 0, 0.574583315377433 }, { 1, 0.425416684622567 } }), new(0, new Dictionary { { 0, 0.838724674018791 }, { 1, 0.161275325981208 } }), new(1, new Dictionary { { 0, 0.241480824730825 }, { 1, 0.758519175269175 } }), new(1, new Dictionary { { 0, 0.385258186258186 }, { 1, 0.614741813741813 } }), new(0, new Dictionary { { 0, 0.726936489980608 }, { 1, 0.273063510019392 } }), new(0, new Dictionary { { 0, 0.706733044733045 }, { 1, 0.293266955266955 } }), new(0, new Dictionary { { 0, 0.801266011766012 }, { 1, 0.198733988233988 } }), new(1, new Dictionary { { 0, 0.294952297702298 }, { 1, 0.705047702297702 } }), new(0, new Dictionary { { 0, 0.821706914001031 }, { 1, 0.178293085998968 } }), new(0, new Dictionary { { 0, 0.780062391856509 }, { 1, 0.21993760814349 } }), new(0, new Dictionary { { 0, 0.554444388944389 }, { 1, 0.445555611055611 } }), new(1, new Dictionary { { 0, 0.261349872349872 }, { 1, 0.738650127650127 } }), new(1, new Dictionary { { 0, 0.419758186258186 }, { 1, 0.580241813741813 } }), new(0, new Dictionary { { 0, 0.71382231249143 }, { 1, 0.28617768750857 } }), new(1, new Dictionary { { 0, 0.241480824730825 }, { 1, 0.758519175269175 } }), new(1, new Dictionary { { 0, 0.47562148962149 }, { 1, 0.52437851037851 } }), new(0, new Dictionary { { 0, 0.821706914001031 }, { 1, 0.178293085998968 } }), new(0, new Dictionary { { 0, 0.792506836300954 }, { 1, 0.207493163699046 } }), new(0, new Dictionary { { 0, 0.666244987039105 }, { 1, 0.333755012960895 } }) }; 
CollectionAssert.AreEqual(expected, actual); } @@ -116,7 +116,7 @@ public void ClassificationForestModel_PredictProbability_Multiple() Assert.AreEqual(0.076923076923076927, error, m_delta); - var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.650149027443145 }, { 1, 0.349850972556855 }, }), new(0, new Dictionary { { 0, 0.566943847818848 }, { 1, 0.433056152181152 }, }), new(0, new Dictionary { { 0, 0.726936489980608 }, { 1, 0.273063510019392 }, }), new(0, new Dictionary { { 0, 0.752781908451026 }, { 1, 0.247218091548974 }, }), new(0, new Dictionary { { 0, 0.566943847818848 }, { 1, 0.433056152181152 }, }), new(0, new Dictionary { { 0, 0.792506836300954 }, { 1, 0.207493163699046 }, }), new(1, new Dictionary { { 0, 0.491736055611056 }, { 1, 0.508263944388944 }, }), new(0, new Dictionary { { 0, 0.574583315377433 }, { 1, 0.425416684622567 }, }), new(0, new Dictionary { { 0, 0.838724674018791 }, { 1, 0.161275325981208 }, }), new(1, new Dictionary { { 0, 0.241480824730825 }, { 1, 0.758519175269175 }, }), new(1, new Dictionary { { 0, 0.385258186258186 }, { 1, 0.614741813741813 }, }), new(0, new Dictionary { { 0, 0.726936489980608 }, { 1, 0.273063510019392 }, }), new(0, new Dictionary { { 0, 0.706733044733045 }, { 1, 0.293266955266955 }, }), new(0, new Dictionary { { 0, 0.801266011766012 }, { 1, 0.198733988233988 }, }), new(1, new Dictionary { { 0, 0.294952297702298 }, { 1, 0.705047702297702 }, }), new(0, new Dictionary { { 0, 0.821706914001031 }, { 1, 0.178293085998968 }, }), new(0, new Dictionary { { 0, 0.780062391856509 }, { 1, 0.21993760814349 }, }), new(0, new Dictionary { { 0, 0.554444388944389 }, { 1, 0.445555611055611 }, }), new(1, new Dictionary { { 0, 0.261349872349872 }, { 1, 0.738650127650127 }, }), new(1, new Dictionary { { 0, 0.419758186258186 }, { 1, 0.580241813741813 }, }), new(0, new Dictionary { { 0, 0.71382231249143 }, { 1, 0.28617768750857 }, }), new(1, new Dictionary { { 0, 0.241480824730825 }, { 1, 0.758519175269175 }, }), 
new(1, new Dictionary { { 0, 0.47562148962149 }, { 1, 0.52437851037851 }, }), new(0, new Dictionary { { 0, 0.821706914001031 }, { 1, 0.178293085998968 }, }), new(0, new Dictionary { { 0, 0.792506836300954 }, { 1, 0.207493163699046 }, }), new(0, new Dictionary { { 0, 0.666244987039105 }, { 1, 0.333755012960895 }, }) }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.650149027443145 }, { 1, 0.349850972556855 } }), new(0, new Dictionary { { 0, 0.566943847818848 }, { 1, 0.433056152181152 } }), new(0, new Dictionary { { 0, 0.726936489980608 }, { 1, 0.273063510019392 } }), new(0, new Dictionary { { 0, 0.752781908451026 }, { 1, 0.247218091548974 } }), new(0, new Dictionary { { 0, 0.566943847818848 }, { 1, 0.433056152181152 } }), new(0, new Dictionary { { 0, 0.792506836300954 }, { 1, 0.207493163699046 } }), new(1, new Dictionary { { 0, 0.491736055611056 }, { 1, 0.508263944388944 } }), new(0, new Dictionary { { 0, 0.574583315377433 }, { 1, 0.425416684622567 } }), new(0, new Dictionary { { 0, 0.838724674018791 }, { 1, 0.161275325981208 } }), new(1, new Dictionary { { 0, 0.241480824730825 }, { 1, 0.758519175269175 } }), new(1, new Dictionary { { 0, 0.385258186258186 }, { 1, 0.614741813741813 } }), new(0, new Dictionary { { 0, 0.726936489980608 }, { 1, 0.273063510019392 } }), new(0, new Dictionary { { 0, 0.706733044733045 }, { 1, 0.293266955266955 } }), new(0, new Dictionary { { 0, 0.801266011766012 }, { 1, 0.198733988233988 } }), new(1, new Dictionary { { 0, 0.294952297702298 }, { 1, 0.705047702297702 } }), new(0, new Dictionary { { 0, 0.821706914001031 }, { 1, 0.178293085998968 } }), new(0, new Dictionary { { 0, 0.780062391856509 }, { 1, 0.21993760814349 } }), new(0, new Dictionary { { 0, 0.554444388944389 }, { 1, 0.445555611055611 } }), new(1, new Dictionary { { 0, 0.261349872349872 }, { 1, 0.738650127650127 } }), new(1, new Dictionary { { 0, 0.419758186258186 }, { 1, 0.580241813741813 } }), new(0, new Dictionary { { 0, 0.71382231249143 }, { 1, 
0.28617768750857 } }), new(1, new Dictionary { { 0, 0.241480824730825 }, { 1, 0.758519175269175 } }), new(1, new Dictionary { { 0, 0.47562148962149 }, { 1, 0.52437851037851 } }), new(0, new Dictionary { { 0, 0.821706914001031 }, { 1, 0.178293085998968 } }), new(0, new Dictionary { { 0, 0.792506836300954 }, { 1, 0.207493163699046 } }), new(0, new Dictionary { { 0, 0.666244987039105 }, { 1, 0.333755012960895 } }) }; CollectionAssert.AreEqual(expected, actual); } @@ -126,14 +126,14 @@ public void ClassificationForestModel_GetVariableImportance() var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, - { "PreviousExperience_month", 1 } }; + { "PreviousExperience_month", 1 }, }; var learner = new ClassificationRandomForestLearner(100, 5, 100, 1, 0.0001, 1.0, 42, false); var sut = learner.Learn(observations, targets); var actual = sut.GetVariableImportance(featureNameToIndex); var expected = new Dictionary { {"PreviousExperience_month", 100}, - {"AptitudeTestScore", 43.4356891141648 }}; + {"AptitudeTestScore", 43.4356891141648 },}; Assert.AreEqual(expected.Count, actual.Count); var zip = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a }); diff --git a/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs b/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs index 099e90c2..ccc72556 100644 --- a/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs +++ b/src/SharpLearning.RandomForest.Test/Models/RegressionForestModelTest.cs @@ -74,7 +74,7 @@ public void RegressionForestModel_PredictCertainty_Single() Assert.AreEqual(0.15381141277554411, error, m_delta); - var expected = new CertaintyPrediction[] { new(0.379151515151515, 0.0608255007215007), new(0.411071351850763, 0.0831655436577049), new(0.243420918950331, 0.0452827034233046), new(0.302332251082251, 0.0699917594408057), new(0.411071351850763, 0.0831655436577049), 
new(0.175743762773174, 0.0354069437824887), new(0.574083361083361, 0.0765858693929188), new(0.259063776093188, 0.0491198812971218), new(0.163878898878899, 0.0331543420321184), new(0.671753996003996, 0.0624466591504497), new(0.418472943722944, 0.0607014359023913), new(0.243420918950331, 0.0452827034233046), new(0.443779942279942, 0.0941961872991865), new(0.156999361749362, 0.0435804333960299), new(0.591222034501446, 0.0873624628347336), new(0.123822406351818, 0.0283119805431255), new(0.162873993653405, 0.0333697457759022), new(0.596261932511932, 0.0695341060210394), new(0.671753996003996, 0.0624466591504497), new(0.418472943722944, 0.0607014359023913), new(0.329000027750028, 0.0788869852405852), new(0.671753996003996, 0.0624466591504497), new(0.499770375049787, 0.0913884936411888), new(0.140025508804921, 0.0309875116490099), new(0.161207326986739, 0.0336321035325246), new(0.389553418803419, 0.0744433596104835), }; + var expected = new CertaintyPrediction[] { new(0.379151515151515, 0.0608255007215007), new(0.411071351850763, 0.0831655436577049), new(0.243420918950331, 0.0452827034233046), new(0.302332251082251, 0.0699917594408057), new(0.411071351850763, 0.0831655436577049), new(0.175743762773174, 0.0354069437824887), new(0.574083361083361, 0.0765858693929188), new(0.259063776093188, 0.0491198812971218), new(0.163878898878899, 0.0331543420321184), new(0.671753996003996, 0.0624466591504497), new(0.418472943722944, 0.0607014359023913), new(0.243420918950331, 0.0452827034233046), new(0.443779942279942, 0.0941961872991865), new(0.156999361749362, 0.0435804333960299), new(0.591222034501446, 0.0873624628347336), new(0.123822406351818, 0.0283119805431255), new(0.162873993653405, 0.0333697457759022), new(0.596261932511932, 0.0695341060210394), new(0.671753996003996, 0.0624466591504497), new(0.418472943722944, 0.0607014359023913), new(0.329000027750028, 0.0788869852405852), new(0.671753996003996, 0.0624466591504497), new(0.499770375049787, 0.0913884936411888), 
new(0.140025508804921, 0.0309875116490099), new(0.161207326986739, 0.0336321035325246), new(0.389553418803419, 0.0744433596104835) }; CollectionAssert.AreEqual(expected, actual); } @@ -92,7 +92,7 @@ public void RegressionForestModel_PredictProbability_Multiple() Assert.AreEqual(0.15381141277554411, error, m_delta); - var expected = new CertaintyPrediction[] { new(0.379151515151515, 0.0608255007215007), new(0.411071351850763, 0.0831655436577049), new(0.243420918950331, 0.0452827034233046), new(0.302332251082251, 0.0699917594408057), new(0.411071351850763, 0.0831655436577049), new(0.175743762773174, 0.0354069437824887), new(0.574083361083361, 0.0765858693929188), new(0.259063776093188, 0.0491198812971218), new(0.163878898878899, 0.0331543420321184), new(0.671753996003996, 0.0624466591504497), new(0.418472943722944, 0.0607014359023913), new(0.243420918950331, 0.0452827034233046), new(0.443779942279942, 0.0941961872991865), new(0.156999361749362, 0.0435804333960299), new(0.591222034501446, 0.0873624628347336), new(0.123822406351818, 0.0283119805431255), new(0.162873993653405, 0.0333697457759022), new(0.596261932511932, 0.0695341060210394), new(0.671753996003996, 0.0624466591504497), new(0.418472943722944, 0.0607014359023913), new(0.329000027750028, 0.0788869852405852), new(0.671753996003996, 0.0624466591504497), new(0.499770375049787, 0.0913884936411888), new(0.140025508804921, 0.0309875116490099), new(0.161207326986739, 0.0336321035325246), new(0.389553418803419, 0.0744433596104835), }; + var expected = new CertaintyPrediction[] { new(0.379151515151515, 0.0608255007215007), new(0.411071351850763, 0.0831655436577049), new(0.243420918950331, 0.0452827034233046), new(0.302332251082251, 0.0699917594408057), new(0.411071351850763, 0.0831655436577049), new(0.175743762773174, 0.0354069437824887), new(0.574083361083361, 0.0765858693929188), new(0.259063776093188, 0.0491198812971218), new(0.163878898878899, 0.0331543420321184), new(0.671753996003996, 0.0624466591504497), 
new(0.418472943722944, 0.0607014359023913), new(0.243420918950331, 0.0452827034233046), new(0.443779942279942, 0.0941961872991865), new(0.156999361749362, 0.0435804333960299), new(0.591222034501446, 0.0873624628347336), new(0.123822406351818, 0.0283119805431255), new(0.162873993653405, 0.0333697457759022), new(0.596261932511932, 0.0695341060210394), new(0.671753996003996, 0.0624466591504497), new(0.418472943722944, 0.0607014359023913), new(0.329000027750028, 0.0788869852405852), new(0.671753996003996, 0.0624466591504497), new(0.499770375049787, 0.0913884936411888), new(0.140025508804921, 0.0309875116490099), new(0.161207326986739, 0.0336321035325246), new(0.389553418803419, 0.0744433596104835) }; CollectionAssert.AreEqual(expected, actual); } @@ -109,8 +109,7 @@ public void RegressionForestModel_Trees() for (var row = 0; row < rows; row++) { var observation = observations.Row(row); - predictions[row] = sut.Trees.Select(t => t.Predict(observation)) - .Average(); + predictions[row] = sut.Trees.Average(t => t.Predict(observation)); } var evaluator = new MeanSquaredErrorRegressionMetric(); @@ -125,14 +124,14 @@ public void RegressionForestModel_GetVariableImportance() var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet(); var featureNameToIndex = new Dictionary { { "AptitudeTestScore", 0 }, - { "PreviousExperience_month", 1 } }; + { "PreviousExperience_month", 1 }, }; var learner = new RegressionRandomForestLearner(100, 5, 100, 1, 0.0001, 1.0, 42, false); var sut = learner.Learn(observations, targets); var actual = sut.GetVariableImportance(featureNameToIndex); var expected = new Dictionary { {"PreviousExperience_month", 100}, - {"AptitudeTestScore", 42.3879919692465 }}; + {"AptitudeTestScore", 42.3879919692465 },}; Assert.AreEqual(expected.Count, actual.Count); var zip = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a }); diff --git a/src/SharpLearning.RandomForest/Learners/ClassificationExtremelyRandomizedTreesLearner.cs 
b/src/SharpLearning.RandomForest/Learners/ClassificationExtremelyRandomizedTreesLearner.cs index 137b0b33..db92dbb7 100644 --- a/src/SharpLearning.RandomForest/Learners/ClassificationExtremelyRandomizedTreesLearner.cs +++ b/src/SharpLearning.RandomForest/Learners/ClassificationExtremelyRandomizedTreesLearner.cs @@ -34,7 +34,7 @@ public sealed class ClassificationExtremelyRandomizedTreesLearner readonly bool m_runParallel; /// - /// The extremely randomized trees learner is an ensemble learner consisting of a series of randomized decision trees. + /// The extremely randomized trees learner is an ensemble learner consisting of a series of randomized decision trees. /// It takes the randomization a step further than random forest and also select the splits randomly /// /// Number of trees to use in the ensemble @@ -42,8 +42,8 @@ public sealed class ClassificationExtremelyRandomizedTreesLearner /// The maximal tree depth before a leaf is generated /// Number of features used at each split in each tree /// The minimum improvement in information gain before a split is made - /// The ratio of observations sampled with replacement for each tree. - /// Default is 1.0 sampling the same count as the number of observations in the input. + /// The ratio of observations sampled with replacement for each tree. + /// Default is 1.0 sampling the same count as the number of observations in the input. 
/// If below 1.0 the algorithm changes to random patches /// Seed for the random number generator /// Use multi threading to speed up execution (default is true) diff --git a/src/SharpLearning.RandomForest/Learners/ClassificationRandomForestLearner.cs b/src/SharpLearning.RandomForest/Learners/ClassificationRandomForestLearner.cs index 11250d3d..65c2514f 100644 --- a/src/SharpLearning.RandomForest/Learners/ClassificationRandomForestLearner.cs +++ b/src/SharpLearning.RandomForest/Learners/ClassificationRandomForestLearner.cs @@ -40,8 +40,8 @@ public sealed class ClassificationRandomForestLearner /// The maximal tree depth before a leaf is generated /// Number of features used at each split in each tree /// The minimum improvement in information gain before a split is made - /// The ratio of observations sampled with replacement for each tree. - /// Default is 1.0 sampling the same count as the number of observations in the input. + /// The ratio of observations sampled with replacement for each tree. + /// Default is 1.0 sampling the same count as the number of observations in the input. /// If below 1.0 the algorithm changes to random patches /// Seed for the random number generator /// Use multi threading to speed up execution (default is true) diff --git a/src/SharpLearning.RandomForest/Learners/RegressionExtremelyRandomizedTreesLearner.cs b/src/SharpLearning.RandomForest/Learners/RegressionExtremelyRandomizedTreesLearner.cs index e4339e6c..1cefd834 100644 --- a/src/SharpLearning.RandomForest/Learners/RegressionExtremelyRandomizedTreesLearner.cs +++ b/src/SharpLearning.RandomForest/Learners/RegressionExtremelyRandomizedTreesLearner.cs @@ -30,7 +30,7 @@ public sealed class RegressionExtremelyRandomizedTreesLearner : IIndexedLearner< readonly bool m_runParallel; /// - /// The extremely randomized trees learner is an ensemble learner consisting of a series of randomized decision trees. 
+ /// The extremely randomized trees learner is an ensemble learner consisting of a series of randomized decision trees. /// It takes the randomization a step futher than random forest and also select the splits randomly /// /// Number of trees to use in the ensemble @@ -38,8 +38,8 @@ public sealed class RegressionExtremelyRandomizedTreesLearner : IIndexedLearner< /// The maximal tree depth before a leaf is generated /// Number of features used at each split in each tree /// The minimum improvement in information gain before a split is made - /// The ratio of observations sampled with replacement for each tree. - /// Default is 1.0 sampling the same count as the number of observations in the input. + /// The ratio of observations sampled with replacement for each tree. + /// Default is 1.0 sampling the same count as the number of observations in the input. /// If below 1.0 the algorithm changes to random patches /// Seed for the random number generator /// Use multi threading to speed up execution (default is true) diff --git a/src/SharpLearning.RandomForest/Learners/RegressionRandomForestLearner.cs b/src/SharpLearning.RandomForest/Learners/RegressionRandomForestLearner.cs index 875d11ae..2d95337e 100644 --- a/src/SharpLearning.RandomForest/Learners/RegressionRandomForestLearner.cs +++ b/src/SharpLearning.RandomForest/Learners/RegressionRandomForestLearner.cs @@ -35,8 +35,8 @@ public sealed class RegressionRandomForestLearner : IIndexedLearner, ILe /// The maximal tree depth before a leaf is generated /// Number of features used at each split in each tree /// The minimum improvement in information gain before a split is made - /// The ratio of observations sampled with replacement for each tree. - /// Default is 1.0 sampling the same count as the number of observations in the input. + /// The ratio of observations sampled with replacement for each tree. + /// Default is 1.0 sampling the same count as the number of observations in the input. 
/// If below 1.0 the algorithm changes to random patches /// Seed for the random number generator /// Use multi threading to speed up execution (default is true) diff --git a/src/SharpLearning.RandomForest/Models/ClassificationForestModel.cs b/src/SharpLearning.RandomForest/Models/ClassificationForestModel.cs index d692fa24..927c2b32 100644 --- a/src/SharpLearning.RandomForest/Models/ClassificationForestModel.cs +++ b/src/SharpLearning.RandomForest/Models/ClassificationForestModel.cs @@ -25,8 +25,8 @@ public sealed class ClassificationForestModel : IPredictorModel, IPredic /// The summed variable importance from all decision trees public ClassificationForestModel(ClassificationDecisionTreeModel[] models, double[] rawVariableImportance) { - Trees = models ?? throw new ArgumentNullException("models"); - m_rawVariableImportance = rawVariableImportance ?? throw new ArgumentNullException("rawVariableImportance"); + Trees = models ?? throw new ArgumentNullException(nameof(models)); + m_rawVariableImportance = rawVariableImportance ?? 
throw new ArgumentNullException(nameof(rawVariableImportance)); } /// diff --git a/src/SharpLearning.RandomForest/Models/RegressionForestModel.cs b/src/SharpLearning.RandomForest/Models/RegressionForestModel.cs index 1aa64e11..36b2c2c7 100644 --- a/src/SharpLearning.RandomForest/Models/RegressionForestModel.cs +++ b/src/SharpLearning.RandomForest/Models/RegressionForestModel.cs @@ -41,8 +41,7 @@ public RegressionForestModel(RegressionDecisionTreeModel[] trees, double[] rawVa /// public double Predict(double[] observation) { - var prediction = Trees.Select(m => m.Predict(observation)) - .Average(); + var prediction = Trees.Average(m => m.Predict(observation)); return prediction; } diff --git a/src/SharpLearning.XGBoost.Test/ConversionsTest.cs b/src/SharpLearning.XGBoost.Test/ConversionsTest.cs index f531871d..605777da 100644 --- a/src/SharpLearning.XGBoost.Test/ConversionsTest.cs +++ b/src/SharpLearning.XGBoost.Test/ConversionsTest.cs @@ -13,7 +13,7 @@ public void Conversions_ToFloatJaggedArray() { 10, 11, 12, 13, - 14, 15 + 14, 15, }; var matrix = new F64Matrix(data, 3, 2); var actual = matrix.ToFloatJaggedArray(); @@ -34,7 +34,7 @@ public void Conversions_ToFloatJaggedArray_Indexed_All() { 10, 11, 12, 13, - 14, 15 + 14, 15, }; var matrix = new F64Matrix(data, 3, 2); var actual = matrix.ToFloatJaggedArray([0, 1, 2]); @@ -55,7 +55,7 @@ public void Conversions_ToFloatJaggedArray_Indexed() { 10, 11, 12, 13, - 14, 15 + 14, 15, }; var matrix = new F64Matrix(data, 3, 2); var actual = matrix.ToFloatJaggedArray([0, 2]); diff --git a/src/SharpLearning.XGBoost.Test/DataSetUtilities.cs b/src/SharpLearning.XGBoost.Test/DataSetUtilities.cs index f1c33870..e3a2c39d 100644 --- a/src/SharpLearning.XGBoost.Test/DataSetUtilities.cs +++ b/src/SharpLearning.XGBoost.Test/DataSetUtilities.cs @@ -478,5 +478,4 @@ public static (F64Matrix observations, double[] targets) LoadDecisionTreeDataSet 1.52065;14.36;0;2.02;73.42;0;8.44;1.64;0;5 1.51651;14.38;0;1.94;73.61;0;8.48;1.57;0;5 
1.51711;14.23;0;2.08;73.36;0;8.62;1.67;0;5"; - } diff --git a/src/SharpLearning.XGBoost.Test/Models/ClassificationXGBoostModelTest.cs b/src/SharpLearning.XGBoost.Test/Models/ClassificationXGBoostModelTest.cs index d2b1e8b5..7b7699f3 100644 --- a/src/SharpLearning.XGBoost.Test/Models/ClassificationXGBoostModelTest.cs +++ b/src/SharpLearning.XGBoost.Test/Models/ClassificationXGBoostModelTest.cs @@ -97,7 +97,7 @@ public void ClassificationXGBoostModel_PredictProbability_Single() Assert.AreEqual(0.17757009345794392, error, m_delta); - var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.204421937465668 }, { 1, 0.165088519454002 }, { 2, 0.169509157538414 }, { 3, 0.15365232527256 }, { 4, 0.15364582836628 }, { 5, 0.153682202100754 }, }), new(1, new Dictionary { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 
0.148812994360924 }, { 5, 0.148848220705986 }, }), new(1, new Dictionary { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 0.141705080866814 }, { 5, 0.141738638281822 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 
0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(1, new Dictionary { { 0, 0.171550869941711 }, { 1, 0.17407751083374 }, { 2, 0.168291121721268 }, { 3, 0.162018626928329 }, { 4, 0.162011757493019 }, { 5, 0.162050127983093 }, }), new(0, new Dictionary { { 0, 0.200587809085846 }, { 1, 0.200458511710167 }, { 2, 0.1540387570858 }, { 3, 0.1482974588871 }, { 4, 0.148291185498238 }, { 5, 0.14832629263401 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.209190428256989 }, { 1, 0.166168510913849 }, { 2, 0.160645022988319 }, { 3, 0.154657498002052 }, { 4, 0.154650956392288 }, { 5, 0.154687568545341 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 
}, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(0, new Dictionary { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 
0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.231333449482918 }, { 1, 0.158397167921066 }, { 2, 0.16797336935997 }, { 3, 0.147424504160881 }, { 4, 0.147418275475502 }, { 5, 0.147453173995018 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary { { 0, 0.222247675061226 }, { 1, 0.191451728343964 }, { 2, 0.161376118659973 }, { 3, 0.141634315252304 }, { 4, 0.141628324985504 }, { 5, 0.14166185259819 }, }), new(1, new Dictionary { { 0, 0.211069479584694 }, { 1, 0.217032581567764 }, { 2, 0.157594978809357 }, { 3, 0.134510651230812 }, { 4, 0.134504958987236 }, { 5, 0.145287364721298 }, }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary { { 0, 0.221573829650879 }, { 1, 0.151714622974396 }, { 2, 0.20307545363903 }, { 3, 0.141204878687859 }, { 4, 0.141198918223381 }, { 5, 0.141232341527939 }, }), new(0, new Dictionary { { 0, 0.218772485852242 }, { 1, 0.18845808506012 }, { 2, 0.163346409797668 }, { 3, 0.13941964507103 }, { 4, 0.139413744211197 }, { 5, 0.150589644908905 }, }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 
0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new(1, new Dictionary { { 0, 0.196985200047493 }, { 1, 0.201361879706383 }, { 2, 0.154732927680016 }, { 3, 0.148965761065483 }, { 4, 0.148959457874298 }, { 5, 0.148994728922844 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.22473056614399 }, { 1, 0.153876096010208 }, { 2, 0.157996505498886 }, { 3, 0.143216624855995 }, { 4, 0.143210560083389 }, { 5, 0.176969602704048 }, }), new(0, new Dictionary { { 0, 
0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary { { 0, 0.228362649679184 }, { 1, 0.156363025307655 }, { 2, 0.170506909489632 }, { 3, 0.14553128182888 }, { 4, 0.153676524758339 }, { 5, 0.145559579133987 }, }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary { { 0, 0.231693357229233 }, { 1, 0.15864360332489 }, { 2, 0.162881851196289 }, { 3, 0.151451021432877 }, { 4, 0.147647619247437 }, { 5, 0.147682577371597 }, }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new(1, new Dictionary { { 0, 0.18250384926796 }, { 1, 0.232570126652718 }, { 2, 0.151334419846535 }, { 3, 0.15921525657177 }, { 4, 0.137171939015388 }, { 5, 0.137204423546791 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new 
Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.185695022344589 }, { 1, 0.238080278038979 }, { 2, 0.154919907450676 }, { 3, 0.140427812933922 }, { 4, 0.140421867370605 }, { 5, 0.14045512676239 }, }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 }, }), new(1, new Dictionary { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 0.141705080866814 }, { 5, 0.141738638281822 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 
0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.161344453692436 }, { 1, 0.186508595943451 }, { 2, 0.161158725619316 }, { 3, 0.160697221755981 }, { 4, 0.160690426826477 }, { 5, 0.169600605964661 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(1, new Dictionary { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 }, }), new(1, new Dictionary { { 0, 0.154577597975731 }, { 1, 0.20810940861702 }, { 2, 0.175417006015778 }, { 3, 0.153957515954971 }, { 4, 0.15395100414753 }, { 5, 0.15398745238781 }, }), new(1, new Dictionary { { 0, 0.14681002497673 }, { 1, 0.247902169823647 }, { 2, 0.166602239012718 }, { 3, 0.146221116185188 }, { 4, 0.146214917302132 }, { 5, 0.146249532699585 }, }), new(1, new Dictionary { { 0, 0.149800211191177 }, { 1, 0.252951383590698 }, { 2, 0.149627774953842 }, { 3, 0.149199306964874 }, { 4, 
0.149192988872528 }, { 5, 0.149228319525719 }, }), new(1, new Dictionary { { 0, 0.164128586649895 }, { 1, 0.175637125968933 }, { 2, 0.169798880815506 }, { 3, 0.163470193743706 }, { 4, 0.163463264703751 }, { 5, 0.163501977920532 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new(1, new Dictionary { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new(1, new Dictionary { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new(1, new Dictionary { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new(0, new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new(1, new Dictionary { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 }, }), new(1, new Dictionary { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 }, }), new(1, new Dictionary { { 0, 0.142483577132225 }, { 1, 0.228974625468254 }, { 2, 0.142319530248642 }, { 3, 
0.164056032896042 }, { 4, 0.14190599322319 }, { 5, 0.18026028573513 }, }), new(1, new Dictionary { { 0, 0.143185585737228 }, { 1, 0.230102762579918 }, { 2, 0.143020734190941 }, { 3, 0.16486431658268 }, { 4, 0.142605155706406 }, { 5, 0.176221489906311 }, }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new(1, new Dictionary { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 }, }), new(4, new Dictionary { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 }, }), new(1, new Dictionary { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 }, }), new(1, new Dictionary { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 }, }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new(0, new Dictionary { { 0, 0.206467673182487 }, { 1, 0.166740626096725 }, { 2, 0.161198109388351 }, { 3, 0.155189976096153 }, { 4, 0.155183419585228 }, { 5, 0.155220150947571 }, }), new(1, new Dictionary { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 }, }), new(1, new Dictionary { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 
0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 }, }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary { { 0, 0.164174765348434 }, { 1, 0.209589347243309 }, { 2, 0.16105517745018 }, { 3, 0.155052363872528 }, { 4, 0.155045807361603 }, { 5, 0.155082508921623 }, }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 
0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new(3, new Dictionary { { 0, 0.149210333824158 }, { 1, 0.200883388519287 }, { 2, 0.149038568139076 }, { 3, 0.203621536493301 }, { 4, 0.148605495691299 }, { 5, 0.148640677332878 }, }), new(1, new Dictionary { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 }, }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new(0, new Dictionary { { 0, 0.205285787582397 }, { 1, 0.171510457992554 }, { 2, 0.160275369882584 }, { 3, 0.154301628470421 }, { 4, 0.154295101761818 }, { 5, 0.154331624507904 }, }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new(1, new Dictionary { { 0, 0.19794899225235 }, { 1, 0.201120212674141 }, { 2, 0.154547214508057 }, { 3, 0.148786976933479 }, { 4, 0.148780673742294 }, { 5, 0.148815900087357 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 
0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.198042631149292 }, { 1, 0.202442809939384 }, { 2, 0.150195524096489 }, { 3, 0.14976541697979 }, { 4, 0.149759083986282 }, { 5, 0.149794533848763 }, }), new(0, new Dictionary { { 0, 0.220375582575798 }, { 1, 0.189839035272598 }, { 2, 0.145878404378891 }, { 3, 0.16300305724144 }, { 4, 0.140435323119164 }, { 5, 0.140468567609787 }, }), new(0, new Dictionary { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 }, }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(0, new 
Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(1, new Dictionary { { 0, 0.157011136412621 }, { 1, 0.211385697126389 }, { 2, 0.162435546517372 }, { 3, 0.156381294131279 }, { 4, 0.156374678015709 }, { 5, 0.156411692500114 }, }), new(0, new Dictionary { { 0, 0.200586974620819 }, { 1, 0.161991447210312 }, { 2, 0.176644444465637 }, { 3, 0.150769785046577 }, { 4, 0.159208223223686 }, { 5, 0.150799110531807 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(2, new Dictionary { { 0, 0.192420691251755 }, { 1, 0.161742717027664 }, { 2, 0.209272593259811 }, { 3, 0.145513951778412 }, { 4, 0.145507797598839 }, { 5, 0.145542249083519 }, }), new(0, new Dictionary { { 0, 0.203436717391014 }, { 1, 0.165295079350471 }, { 2, 0.169711023569107 }, { 3, 0.153844580054283 }, { 4, 0.153838068246841 }, { 5, 0.15387450158596 }, }), new(0, new Dictionary { { 0, 0.202308386564255 }, { 1, 0.164378300309181 }, { 2, 0.174316108226776 }, { 3, 0.152991309762001 }, { 4, 0.152984827756882 }, { 5, 0.153021052479744 }, }), new(2, new Dictionary { { 0, 0.193465068936348 }, { 1, 0.15719299018383 }, { 2, 0.210408434271812 }, { 3, 0.146303743124008 }, { 4, 0.146297559142113 }, { 5, 0.146332189440727 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, 
}), new(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new(0, new Dictionary { { 0, 0.221237704157829 }, { 1, 0.190581694245338 }, { 2, 0.165187060832977 }, { 3, 0.140990674495697 }, { 4, 0.140984714031219 }, { 5, 0.141018092632294 }, }), new(0, new Dictionary { { 0, 0.216337636113167 }, { 1, 0.186360627412796 }, { 2, 0.161528438329697 }, { 3, 0.160016357898712 }, { 4, 0.137862130999565 }, { 5, 0.13789476454258 }, }), new(3, new Dictionary { { 0, 0.149128466844559 }, { 1, 0.163378983736038 }, { 2, 0.148956805467606 }, { 3, 0.206476286053658 }, { 4, 0.148523956537247 }, { 5, 0.183535546064377 }, }), new(3, new Dictionary { { 0, 0.156232386827469 }, { 1, 0.167187243700027 }, { 2, 0.156052529811859 }, { 3, 0.209292829036713 }, { 4, 0.15559908747673 }, { 5, 0.15563590824604 }, }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary { { 0, 0.14455483853817 }, { 1, 0.154690876603127 }, { 2, 0.144388437271118 }, { 3, 0.229516208171844 }, { 4, 0.143968880176544 }, { 5, 0.182880714535713 }, }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary { { 0, 0.153719380497932 }, { 1, 0.164498031139374 }, { 2, 0.153542414307594 }, { 3, 0.209774866700172 }, { 4, 0.153096258640289 }, { 5, 
0.165369004011154 }, }), new(3, new Dictionary { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 }, }), new(3, new Dictionary { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 }, }), new(3, new Dictionary { { 0, 0.152724325656891 }, { 1, 0.160508275032043 }, { 2, 0.149818390607834 }, { 3, 0.238147541880608 }, { 4, 0.149383053183556 }, { 5, 0.14941842854023 }, }), new(1, new Dictionary { { 0, 0.152712091803551 }, { 1, 0.20559786260128 }, { 2, 0.152536302804947 }, { 3, 0.176534190773964 }, { 4, 0.152093067765236 }, { 5, 0.160526528954506 }, }), new(3, new Dictionary { { 0, 0.144403666257858 }, { 1, 0.194412112236023 }, { 2, 0.14423742890358 }, { 3, 0.229276165366173 }, { 4, 0.14381830394268 }, { 5, 0.143852367997169 }, }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary { { 0, 0.152389481663704 }, { 1, 0.160156399011612 }, { 2, 0.149489924311638 }, { 3, 0.149061858654022 }, { 4, 0.23981149494648 }, { 5, 0.149090841412544 }, }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary { { 0, 0.149491384625435 }, { 1, 0.163839146494865 }, { 2, 0.149319306015968 }, { 3, 0.148891717195511 }, { 4, 0.23953777551651 }, { 5, 0.148920670151711 }, }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 
0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary { { 0, 0.142711848020554 }, { 1, 0.152718678116798 }, { 2, 0.142547562718391 }, { 3, 0.191180393099785 }, { 4, 0.228674560785294 }, { 5, 0.142167016863823 }, }), new(4, new Dictionary { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 }, }), new(4, new Dictionary { { 0, 0.151802018284798 }, { 1, 0.163394033908844 }, { 2, 0.148913636803627 }, { 3, 0.148487210273743 }, { 4, 0.238887012004852 }, { 5, 0.148516088724136 }, }), new(5, new Dictionary { { 0, 0.14544840157032 }, { 1, 0.159347251057625 }, { 2, 0.145280972123146 }, { 3, 0.144864946603775 }, { 4, 0.144858822226524 }, { 5, 0.26019960641861 }, }), new(5, new Dictionary { { 0, 0.158301413059235 }, { 1, 0.17342846095562 }, { 2, 0.158119171857834 }, { 3, 0.157666385173798 }, { 4, 0.157659709453583 }, { 5, 0.194824859499931 }, }), new(0, new Dictionary { { 0, 0.219101145863533 }, { 1, 0.15002153813839 }, { 2, 0.200809195637703 }, { 3, 0.139629080891609 }, { 4, 0.139623180031776 }, { 5, 0.15081587433815 }, }), new(5, new Dictionary { { 0, 0.158076956868172 }, { 1, 0.169161155819893 }, { 2, 0.157894983887672 }, { 3, 0.157442837953568 }, { 4, 0.157436162233353 }, { 5, 0.199987947940826 }, }), new(5, new Dictionary { { 0, 0.161370471119881 }, { 1, 0.16959510743618 }, { 2, 0.158300027251244 }, { 3, 0.157846719026566 }, { 4, 0.157840043306351 }, { 5, 0.195047691464424 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 
0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.160495221614838 }, { 1, 0.168675243854523 }, { 2, 0.157441437244415 }, { 3, 0.156990587711334 }, { 4, 0.156983941793442 }, { 5, 0.199413493275642 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145015180110931 }, { 1, 0.158933326601982 }, { 2, 
0.144848257303238 }, { 3, 0.144433453679085 }, { 4, 0.147345185279846 }, { 5, 0.259424567222595 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.146661177277565 }, { 1, 0.152337715029716 }, { 2, 0.1464923620224 }, { 3, 0.146072864532471 }, { 4, 0.146066680550575 }, { 5, 0.262369185686111 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.138151779770851 }, { 1, 0.147838860750198 }, { 2, 0.13799275457859 }, { 3, 0.191278502345085 }, { 4, 0.137591779232025 }, { 5, 0.24714632332325 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 
0, 0.204421937465668 }, { 1, 0.165088519454002 }, { 2, 0.169509157538414 }, { 3, 0.15365232527256 }, { 4, 0.15364582836628 }, { 5, 0.153682202100754 } }), new(1, new Dictionary { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 } }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 } }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 } }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 } }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 } }), new(1, new Dictionary { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 } }), new(0, new 
Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(1, new Dictionary { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 0.141705080866814 }, { 5, 0.141738638281822 } }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(1, new Dictionary { { 0, 0.171550869941711 }, { 1, 0.17407751083374 }, { 2, 0.168291121721268 }, { 3, 0.162018626928329 }, { 4, 0.162011757493019 }, { 5, 0.162050127983093 } }), new(0, new Dictionary { { 0, 0.200587809085846 }, { 1, 0.200458511710167 }, { 2, 0.1540387570858 }, { 3, 0.1482974588871 }, { 4, 0.148291185498238 }, { 5, 0.14832629263401 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(0, new 
Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.209190428256989 }, { 1, 0.166168510913849 }, { 2, 0.160645022988319 }, { 3, 0.154657498002052 }, { 4, 0.154650956392288 }, { 5, 0.154687568545341 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 } }), new(0, 
new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 } }), new(0, new Dictionary { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.231333449482918 }, { 1, 0.158397167921066 }, { 2, 0.16797336935997 }, { 3, 0.147424504160881 }, { 4, 0.147418275475502 }, { 5, 0.147453173995018 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(0, new 
Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 } }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 } }), new(0, new Dictionary { { 0, 0.222247675061226 }, { 1, 0.191451728343964 }, { 2, 0.161376118659973 }, { 3, 0.141634315252304 }, { 4, 0.141628324985504 }, { 5, 0.14166185259819 } }), new(1, new Dictionary { { 0, 0.211069479584694 }, { 1, 0.217032581567764 }, { 2, 0.157594978809357 }, { 3, 0.134510651230812 }, { 4, 0.134504958987236 }, { 5, 0.145287364721298 } }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 } }), new(0, new Dictionary { { 0, 0.221573829650879 }, { 1, 0.151714622974396 }, { 2, 0.20307545363903 }, { 3, 0.141204878687859 }, { 4, 0.141198918223381 }, { 5, 0.141232341527939 } }), new(0, new Dictionary { { 0, 0.218772485852242 }, { 1, 0.18845808506012 }, { 2, 0.163346409797668 }, { 3, 0.13941964507103 }, { 4, 0.139413744211197 }, { 5, 0.150589644908905 } }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 } }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 } }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 } }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 } }), new(0, 
new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 } }), new(1, new Dictionary { { 0, 0.196985200047493 }, { 1, 0.201361879706383 }, { 2, 0.154732927680016 }, { 3, 0.148965761065483 }, { 4, 0.148959457874298 }, { 5, 0.148994728922844 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 } }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.22473056614399 }, { 1, 0.153876096010208 }, { 2, 0.157996505498886 }, { 3, 0.143216624855995 }, { 4, 0.143210560083389 }, { 5, 0.176969602704048 } }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 } }), new(0, new Dictionary { { 0, 0.228362649679184 }, { 1, 0.156363025307655 }, { 2, 0.170506909489632 }, { 3, 0.14553128182888 }, { 4, 0.153676524758339 }, { 5, 0.145559579133987 } }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), 
new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 } }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 } }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 } }), new(0, new Dictionary { { 0, 0.231693357229233 }, { 1, 0.15864360332489 }, { 2, 0.162881851196289 }, { 3, 0.151451021432877 }, { 4, 0.147647619247437 }, { 5, 0.147682577371597 } }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 } }), new(1, new Dictionary { { 0, 0.18250384926796 }, { 1, 0.232570126652718 }, { 2, 0.151334419846535 }, { 3, 0.15921525657177 }, { 4, 0.137171939015388 }, { 5, 0.137204423546791 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 
0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.185695022344589 }, { 1, 0.238080278038979 }, { 2, 0.154919907450676 }, { 3, 0.140427812933922 }, { 4, 0.140421867370605 }, { 5, 0.14045512676239 } }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 } }), new(1, new Dictionary { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 0.141705080866814 }, { 5, 0.141738638281822 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.161344453692436 }, { 1, 0.186508595943451 }, { 2, 0.161158725619316 }, { 3, 0.160697221755981 }, { 4, 0.160690426826477 }, { 5, 0.169600605964661 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 
0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 } }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 } }), new(1, new Dictionary { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 } }), new(1, new Dictionary { { 0, 0.154577597975731 }, { 1, 0.20810940861702 }, { 2, 0.175417006015778 }, { 3, 0.153957515954971 }, { 4, 0.15395100414753 }, { 5, 0.15398745238781 } }), new(1, new Dictionary { { 0, 0.14681002497673 }, { 1, 0.247902169823647 }, { 2, 0.166602239012718 }, { 3, 0.146221116185188 }, { 4, 0.146214917302132 }, { 5, 0.146249532699585 } }), new(1, new Dictionary { { 0, 0.149800211191177 }, { 1, 0.252951383590698 }, { 2, 0.149627774953842 }, { 3, 0.149199306964874 }, { 4, 0.149192988872528 }, { 5, 0.149228319525719 } }), new(1, new Dictionary { { 0, 0.164128586649895 }, { 1, 0.175637125968933 }, { 2, 0.169798880815506 }, { 3, 0.163470193743706 }, { 4, 0.163463264703751 }, { 5, 0.163501977920532 } }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 } }), new(0, new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 } }), new(1, new Dictionary { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 
}, { 4, 0.164426684379578 }, { 5, 0.164465621113777 } }), new(1, new Dictionary { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 } }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 } }), new(1, new Dictionary { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 } }), new(0, new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 } }), new(1, new Dictionary { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 } }), new(1, new Dictionary { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 } }), new(1, new Dictionary { { 0, 0.142483577132225 }, { 1, 0.228974625468254 }, { 2, 0.142319530248642 }, { 3, 0.164056032896042 }, { 4, 0.14190599322319 }, { 5, 0.18026028573513 } }), new(1, new Dictionary { { 0, 0.143185585737228 }, { 1, 0.230102762579918 }, { 2, 0.143020734190941 }, { 3, 0.16486431658268 }, { 4, 0.142605155706406 }, { 5, 0.176221489906311 } }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 } }), new(1, new Dictionary { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 } }), new(4, new Dictionary { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 
0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 } }), new(1, new Dictionary { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 } }), new(1, new Dictionary { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 } }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 } }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 } }), new(0, new Dictionary { { 0, 0.206467673182487 }, { 1, 0.166740626096725 }, { 2, 0.161198109388351 }, { 3, 0.155189976096153 }, { 4, 0.155183419585228 }, { 5, 0.155220150947571 } }), new(1, new Dictionary { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 } }), new(1, new Dictionary { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 } }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, 
{ 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(1, new Dictionary { { 0, 0.164174765348434 }, { 1, 0.209589347243309 }, { 2, 0.16105517745018 }, { 3, 0.155052363872528 }, { 4, 0.155045807361603 }, { 5, 0.155082508921623 } }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 } }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 } }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 } }), new(3, new Dictionary { { 0, 0.149210333824158 }, { 1, 0.200883388519287 }, { 2, 0.149038568139076 }, { 3, 0.203621536493301 }, { 4, 0.148605495691299 }, { 5, 0.148640677332878 } }), new(1, new Dictionary { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 } }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 
0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 } }), new(0, new Dictionary { { 0, 0.205285787582397 }, { 1, 0.171510457992554 }, { 2, 0.160275369882584 }, { 3, 0.154301628470421 }, { 4, 0.154295101761818 }, { 5, 0.154331624507904 } }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 } }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 } }), new(1, new Dictionary { { 0, 0.19794899225235 }, { 1, 0.201120212674141 }, { 2, 0.154547214508057 }, { 3, 0.148786976933479 }, { 4, 0.148780673742294 }, { 5, 0.148815900087357 } }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 
}, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.198042631149292 }, { 1, 0.202442809939384 }, { 2, 0.150195524096489 }, { 3, 0.14976541697979 }, { 4, 0.149759083986282 }, { 5, 0.149794533848763 } }), new(0, new Dictionary { { 0, 0.220375582575798 }, { 1, 0.189839035272598 }, { 2, 0.145878404378891 }, { 3, 0.16300305724144 }, { 4, 0.140435323119164 }, { 5, 0.140468567609787 } }), new(0, new Dictionary { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 } }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 } }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 } }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 } }), new(1, new Dictionary { { 0, 0.157011136412621 }, { 1, 0.211385697126389 }, { 2, 0.162435546517372 }, { 3, 0.156381294131279 }, { 4, 0.156374678015709 }, { 5, 0.156411692500114 } }), new(0, new Dictionary { { 0, 0.200586974620819 }, { 1, 0.161991447210312 }, { 2, 0.176644444465637 }, { 3, 0.150769785046577 }, { 4, 0.159208223223686 }, { 5, 0.150799110531807 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(2, new Dictionary { { 0, 0.192420691251755 }, { 1, 0.161742717027664 
}, { 2, 0.209272593259811 }, { 3, 0.145513951778412 }, { 4, 0.145507797598839 }, { 5, 0.145542249083519 } }), new(0, new Dictionary { { 0, 0.203436717391014 }, { 1, 0.165295079350471 }, { 2, 0.169711023569107 }, { 3, 0.153844580054283 }, { 4, 0.153838068246841 }, { 5, 0.15387450158596 } }), new(0, new Dictionary { { 0, 0.202308386564255 }, { 1, 0.164378300309181 }, { 2, 0.174316108226776 }, { 3, 0.152991309762001 }, { 4, 0.152984827756882 }, { 5, 0.153021052479744 } }), new(2, new Dictionary { { 0, 0.193465068936348 }, { 1, 0.15719299018383 }, { 2, 0.210408434271812 }, { 3, 0.146303743124008 }, { 4, 0.146297559142113 }, { 5, 0.146332189440727 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 } }), new(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 } }), new(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 } }), new(0, new Dictionary { { 0, 0.221237704157829 }, { 1, 0.190581694245338 }, { 2, 0.165187060832977 }, { 3, 0.140990674495697 }, { 4, 0.140984714031219 }, { 5, 0.141018092632294 } }), new(0, new Dictionary { { 0, 0.216337636113167 }, { 1, 0.186360627412796 }, { 2, 0.161528438329697 }, { 3, 0.160016357898712 }, { 4, 0.137862130999565 }, { 5, 0.13789476454258 } }), new(3, new Dictionary { { 0, 0.149128466844559 }, { 1, 0.163378983736038 }, { 2, 0.148956805467606 }, { 3, 0.206476286053658 }, { 4, 0.148523956537247 }, { 5, 0.183535546064377 } }), new(3, new Dictionary { { 0, 0.156232386827469 }, { 1, 
0.167187243700027 }, { 2, 0.156052529811859 }, { 3, 0.209292829036713 }, { 4, 0.15559908747673 }, { 5, 0.15563590824604 } }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 } }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 } }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 } }), new(3, new Dictionary { { 0, 0.14455483853817 }, { 1, 0.154690876603127 }, { 2, 0.144388437271118 }, { 3, 0.229516208171844 }, { 4, 0.143968880176544 }, { 5, 0.182880714535713 } }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 } }), new(3, new Dictionary { { 0, 0.153719380497932 }, { 1, 0.164498031139374 }, { 2, 0.153542414307594 }, { 3, 0.209774866700172 }, { 4, 0.153096258640289 }, { 5, 0.165369004011154 } }), new(3, new Dictionary { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 } }), new(3, new Dictionary { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 } }), new(3, new Dictionary { { 0, 0.152724325656891 }, { 1, 0.160508275032043 }, { 2, 0.149818390607834 }, { 3, 0.238147541880608 }, { 4, 0.149383053183556 }, { 5, 0.14941842854023 } }), new(1, new Dictionary { { 0, 0.152712091803551 }, { 1, 0.20559786260128 }, { 2, 0.152536302804947 }, { 3, 0.176534190773964 }, { 4, 0.152093067765236 }, { 5, 0.160526528954506 } }), new(3, new Dictionary { { 0, 0.144403666257858 }, 
{ 1, 0.194412112236023 }, { 2, 0.14423742890358 }, { 3, 0.229276165366173 }, { 4, 0.14381830394268 }, { 5, 0.143852367997169 } }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 } }), new(4, new Dictionary { { 0, 0.152389481663704 }, { 1, 0.160156399011612 }, { 2, 0.149489924311638 }, { 3, 0.149061858654022 }, { 4, 0.23981149494648 }, { 5, 0.149090841412544 } }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 } }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 } }), new(4, new Dictionary { { 0, 0.149491384625435 }, { 1, 0.163839146494865 }, { 2, 0.149319306015968 }, { 3, 0.148891717195511 }, { 4, 0.23953777551651 }, { 5, 0.148920670151711 } }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 } }), new(4, new Dictionary { { 0, 0.142711848020554 }, { 1, 0.152718678116798 }, { 2, 0.142547562718391 }, { 3, 0.191180393099785 }, { 4, 0.228674560785294 }, { 5, 0.142167016863823 } }), new(4, new Dictionary { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 } }), new(4, new Dictionary { { 0, 0.151802018284798 }, { 1, 0.163394033908844 }, { 2, 0.148913636803627 }, { 3, 0.148487210273743 }, { 4, 0.238887012004852 }, { 5, 0.148516088724136 } }), new(5, new Dictionary { { 0, 0.14544840157032 }, { 1, 0.159347251057625 }, { 2, 0.145280972123146 }, { 3, 0.144864946603775 }, { 4, 0.144858822226524 }, { 5, 0.26019960641861 } }), new(5, new Dictionary { { 0, 0.158301413059235 
}, { 1, 0.17342846095562 }, { 2, 0.158119171857834 }, { 3, 0.157666385173798 }, { 4, 0.157659709453583 }, { 5, 0.194824859499931 } }), new(0, new Dictionary { { 0, 0.219101145863533 }, { 1, 0.15002153813839 }, { 2, 0.200809195637703 }, { 3, 0.139629080891609 }, { 4, 0.139623180031776 }, { 5, 0.15081587433815 } }), new(5, new Dictionary { { 0, 0.158076956868172 }, { 1, 0.169161155819893 }, { 2, 0.157894983887672 }, { 3, 0.157442837953568 }, { 4, 0.157436162233353 }, { 5, 0.199987947940826 } }), new(5, new Dictionary { { 0, 0.161370471119881 }, { 1, 0.16959510743618 }, { 2, 0.158300027251244 }, { 3, 0.157846719026566 }, { 4, 0.157840043306351 }, { 5, 0.195047691464424 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 } }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 } }), new(5, new Dictionary { { 0, 0.145561009645462 
}, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 } }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.160495221614838 }, { 1, 0.168675243854523 }, { 2, 0.157441437244415 }, { 3, 0.156990587711334 }, { 4, 0.156983941793442 }, { 5, 0.199413493275642 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145015180110931 }, { 1, 0.158933326601982 }, { 2, 0.144848257303238 }, { 3, 0.144433453679085 }, { 4, 0.147345185279846 }, { 5, 0.259424567222595 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.146661177277565 }, { 1, 0.152337715029716 }, { 2, 0.1464923620224 }, { 3, 0.146072864532471 }, { 4, 0.146066680550575 }, { 5, 0.262369185686111 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, 
{ 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.138151779770851 }, { 1, 0.147838860750198 }, { 2, 0.13799275457859 }, { 3, 0.191278502345085 }, { 4, 0.137591779232025 }, { 5, 0.24714632332325 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }) }; CollectionAssert.AreEqual(expected, actual); } @@ -125,7 +125,7 @@ public void ClassificationXGBoostModel_PredictProbability_Single_BinaryLogistic( Assert.AreEqual(0.14953271028037382, error, m_delta); - var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.505768656730652 }, { 1, 0.494231373071671 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 
}, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.505768656730652 }, { 1, 0.494231373071671 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.505768656730652 }, { 1, 0.494231373071671 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 
}, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 
}, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 
}, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.427482306957245 }, { 1, 0.572517693042755 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.436939477920532 }, { 1, 0.563060522079468 }, }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.436939477920532 }, { 1, 0.563060522079468 }, }), new(1, new Dictionary { { 0, 0.436939477920532 }, { 1, 0.563060522079468 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 
}, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.455988764762878 }, { 1, 0.544011235237122 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.455988764762878 }, { 1, 0.544011235237122 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 
}, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.455988764762878 }, { 1, 0.544011235237122 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 
}, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.427482306957245 }, { 1, 0.572517693042755 }, }), new(1, new Dictionary { { 0, 0.427482306957245 }, { 1, 0.572517693042755 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 
}, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 
}, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.505768656730652 }, { 1, 0.494231373071671 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.505768656730652 }, { 1, 0.494231373071671 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 
0.414723873138428 }, { 1, 0.585276126861572 } }), new(0, new Dictionary { { 0, 0.505768656730652 }, { 1, 0.494231373071671 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 
1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } 
}), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 } }), new(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.427482306957245 }, { 1, 0.572517693042755 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 } }), new(1, new 
Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.436939477920532 }, { 1, 0.563060522079468 } }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.436939477920532 }, { 1, 0.563060522079468 } }), new(1, new Dictionary { { 0, 0.436939477920532 }, { 1, 0.563060522079468 } }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 } }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 } }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 } }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 } }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 } }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.455988764762878 }, { 1, 0.544011235237122 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(0, new Dictionary { { 0, 
0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.455988764762878 }, { 1, 0.544011235237122 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 } }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.463839888572693 }, { 1, 0.536160111427307 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.455988764762878 }, { 1, 0.544011235237122 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 
1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 } }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 } }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 } }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 } }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 } }), new(1, new Dictionary { { 0, 0.457355201244354 }, { 1, 0.542644798755646 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 } }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 } }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 } }), new(1, new Dictionary { { 0, 0.424107909202576 }, { 1, 0.575892090797424 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } 
}), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 } }), new(1, new Dictionary { { 0, 0.427482306957245 }, { 1, 0.572517693042755 } }), new(1, new Dictionary { { 0, 0.427482306957245 }, { 1, 0.572517693042755 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 } }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(0, new Dictionary { { 0, 0.536582946777344 }, { 1, 0.463417023420334 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new 
Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.425627827644348 }, { 1, 0.574372172355652 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }), new(1, new Dictionary { { 0, 0.414723873138428 }, { 1, 0.585276126861572 } }) }; CollectionAssert.AreEqual(expected, actual); } @@ -143,7 +143,7 @@ public void ClassificationXGBoostModel_PredictProbability_Multiple() Assert.AreEqual(0.17757009345794392, error, m_delta); - var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.204421937465668 }, { 1, 0.165088519454002 }, { 2, 0.169509157538414 }, { 3, 0.15365232527256 }, { 4, 0.15364582836628 }, { 5, 0.153682202100754 }, }), new(1, new Dictionary { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 
0.135750100016594 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(1, new Dictionary { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 
0.141705080866814 }, { 5, 0.141738638281822 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(1, new Dictionary { { 0, 0.171550869941711 }, { 1, 0.17407751083374 }, { 2, 0.168291121721268 }, { 3, 0.162018626928329 }, { 4, 0.162011757493019 }, { 5, 0.162050127983093 }, }), new(0, new Dictionary { { 0, 0.200587809085846 }, { 1, 0.200458511710167 }, { 2, 0.1540387570858 }, { 3, 0.1482974588871 }, { 4, 0.148291185498238 }, { 5, 0.14832629263401 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 
}, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.209190428256989 }, { 1, 0.166168510913849 }, { 2, 0.160645022988319 }, { 3, 0.154657498002052 }, { 4, 0.154650956392288 }, { 5, 0.154687568545341 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 
0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(0, new Dictionary { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.231333449482918 }, { 1, 0.158397167921066 }, { 2, 0.16797336935997 }, { 3, 0.147424504160881 }, { 4, 0.147418275475502 }, { 5, 0.147453173995018 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 
}, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary { { 0, 0.222247675061226 }, { 1, 0.191451728343964 }, { 2, 0.161376118659973 }, { 3, 0.141634315252304 }, { 4, 0.141628324985504 }, { 5, 0.14166185259819 }, }), new(1, new Dictionary { { 0, 0.211069479584694 }, { 1, 0.217032581567764 }, { 2, 0.157594978809357 }, { 3, 0.134510651230812 }, { 4, 0.134504958987236 }, { 5, 0.145287364721298 }, }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary { { 0, 0.221573829650879 }, { 1, 0.151714622974396 }, { 2, 0.20307545363903 }, { 3, 0.141204878687859 }, { 4, 0.141198918223381 }, { 5, 0.141232341527939 }, }), new(0, new Dictionary { { 0, 0.218772485852242 }, { 1, 0.18845808506012 }, { 2, 0.163346409797668 }, { 3, 0.13941964507103 }, { 4, 0.139413744211197 }, { 5, 0.150589644908905 }, }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 }, }), new(0, new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new(1, new Dictionary { { 0, 0.196985200047493 }, { 1, 0.201361879706383 }, { 2, 
0.154732927680016 }, { 3, 0.148965761065483 }, { 4, 0.148959457874298 }, { 5, 0.148994728922844 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(0, new Dictionary { { 0, 0.22473056614399 }, { 1, 0.153876096010208 }, { 2, 0.157996505498886 }, { 3, 0.143216624855995 }, { 4, 0.143210560083389 }, { 5, 0.176969602704048 }, }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary { { 0, 0.228362649679184 }, { 1, 0.156363025307655 }, { 2, 0.170506909489632 }, { 3, 0.14553128182888 }, { 4, 0.153676524758339 }, { 5, 0.145559579133987 }, }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 
0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary { { 0, 0.231693357229233 }, { 1, 0.15864360332489 }, { 2, 0.162881851196289 }, { 3, 0.151451021432877 }, { 4, 0.147647619247437 }, { 5, 0.147682577371597 }, }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new(1, new Dictionary { { 0, 0.18250384926796 }, { 1, 0.232570126652718 }, { 2, 0.151334419846535 }, { 3, 0.15921525657177 }, { 4, 0.137171939015388 }, { 5, 0.137204423546791 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 
0.185695022344589 }, { 1, 0.238080278038979 }, { 2, 0.154919907450676 }, { 3, 0.140427812933922 }, { 4, 0.140421867370605 }, { 5, 0.14045512676239 }, }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 }, }), new(1, new Dictionary { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 0.141705080866814 }, { 5, 0.141738638281822 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.161344453692436 }, { 1, 0.186508595943451 }, { 2, 0.161158725619316 }, { 3, 0.160697221755981 }, { 4, 0.160690426826477 }, { 5, 0.169600605964661 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, 
new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(1, new Dictionary { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 }, }), new(1, new Dictionary { { 0, 0.154577597975731 }, { 1, 0.20810940861702 }, { 2, 0.175417006015778 }, { 3, 0.153957515954971 }, { 4, 0.15395100414753 }, { 5, 0.15398745238781 }, }), new(1, new Dictionary { { 0, 0.14681002497673 }, { 1, 0.247902169823647 }, { 2, 0.166602239012718 }, { 3, 0.146221116185188 }, { 4, 0.146214917302132 }, { 5, 0.146249532699585 }, }), new(1, new Dictionary { { 0, 0.149800211191177 }, { 1, 0.252951383590698 }, { 2, 0.149627774953842 }, { 3, 0.149199306964874 }, { 4, 0.149192988872528 }, { 5, 0.149228319525719 }, }), new(1, new Dictionary { { 0, 0.164128586649895 }, { 1, 0.175637125968933 }, { 2, 0.169798880815506 }, { 3, 0.163470193743706 }, { 4, 0.163463264703751 }, { 5, 0.163501977920532 }, }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 }, }), new(0, new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new(1, new Dictionary { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new(1, new Dictionary { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 
0.164465621113777 }, }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new(1, new Dictionary { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 }, }), new(0, new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 }, }), new(1, new Dictionary { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 }, }), new(1, new Dictionary { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 }, }), new(1, new Dictionary { { 0, 0.142483577132225 }, { 1, 0.228974625468254 }, { 2, 0.142319530248642 }, { 3, 0.164056032896042 }, { 4, 0.14190599322319 }, { 5, 0.18026028573513 }, }), new(1, new Dictionary { { 0, 0.143185585737228 }, { 1, 0.230102762579918 }, { 2, 0.143020734190941 }, { 3, 0.16486431658268 }, { 4, 0.142605155706406 }, { 5, 0.176221489906311 }, }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new(1, new Dictionary { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 }, }), new(4, new Dictionary { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 }, }), new(1, new Dictionary { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 
0.155278235673904 }, { 5, 0.158829256892204 }, }), new(1, new Dictionary { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 }, }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new(0, new Dictionary { { 0, 0.206467673182487 }, { 1, 0.166740626096725 }, { 2, 0.161198109388351 }, { 3, 0.155189976096153 }, { 4, 0.155183419585228 }, { 5, 0.155220150947571 }, }), new(1, new Dictionary { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 }, }), new(1, new Dictionary { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 }, }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 
0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 }, }), new(1, new Dictionary { { 0, 0.164174765348434 }, { 1, 0.209589347243309 }, { 2, 0.16105517745018 }, { 3, 0.155052363872528 }, { 4, 0.155045807361603 }, { 5, 0.155082508921623 }, }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 }, }), new(3, new Dictionary { { 0, 0.149210333824158 }, { 1, 0.200883388519287 }, { 2, 0.149038568139076 }, { 3, 0.203621536493301 }, { 4, 0.148605495691299 }, { 5, 0.148640677332878 }, }), new(1, new Dictionary { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 }, }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 }, }), new(0, new Dictionary { { 0, 0.205285787582397 }, { 1, 0.171510457992554 }, { 2, 
0.160275369882584 }, { 3, 0.154301628470421 }, { 4, 0.154295101761818 }, { 5, 0.154331624507904 }, }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 }, }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 }, }), new(1, new Dictionary { { 0, 0.19794899225235 }, { 1, 0.201120212674141 }, { 2, 0.154547214508057 }, { 3, 0.148786976933479 }, { 4, 0.148780673742294 }, { 5, 0.148815900087357 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 
0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 }, }), new(1, new Dictionary { { 0, 0.198042631149292 }, { 1, 0.202442809939384 }, { 2, 0.150195524096489 }, { 3, 0.14976541697979 }, { 4, 0.149759083986282 }, { 5, 0.149794533848763 }, }), new(0, new Dictionary { { 0, 0.220375582575798 }, { 1, 0.189839035272598 }, { 2, 0.145878404378891 }, { 3, 0.16300305724144 }, { 4, 0.140435323119164 }, { 5, 0.140468567609787 }, }), new(0, new Dictionary { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 }, }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 }, }), new(1, new Dictionary { { 0, 0.157011136412621 }, { 1, 0.211385697126389 }, { 2, 0.162435546517372 }, { 3, 0.156381294131279 }, { 4, 0.156374678015709 }, { 5, 0.156411692500114 }, }), new(0, new Dictionary { { 0, 0.200586974620819 }, { 1, 0.161991447210312 }, { 2, 0.176644444465637 }, { 3, 0.150769785046577 }, { 4, 0.159208223223686 }, { 5, 0.150799110531807 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(2, new Dictionary { { 0, 0.192420691251755 }, { 1, 0.161742717027664 }, { 2, 0.209272593259811 }, { 3, 0.145513951778412 }, { 4, 0.145507797598839 }, { 5, 0.145542249083519 }, }), new(0, new Dictionary { { 0, 
0.203436717391014 }, { 1, 0.165295079350471 }, { 2, 0.169711023569107 }, { 3, 0.153844580054283 }, { 4, 0.153838068246841 }, { 5, 0.15387450158596 }, }), new(0, new Dictionary { { 0, 0.202308386564255 }, { 1, 0.164378300309181 }, { 2, 0.174316108226776 }, { 3, 0.152991309762001 }, { 4, 0.152984827756882 }, { 5, 0.153021052479744 }, }), new(2, new Dictionary { { 0, 0.193465068936348 }, { 1, 0.15719299018383 }, { 2, 0.210408434271812 }, { 3, 0.146303743124008 }, { 4, 0.146297559142113 }, { 5, 0.146332189440727 }, }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 }, }), new(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 }, }), new(0, new Dictionary { { 0, 0.221237704157829 }, { 1, 0.190581694245338 }, { 2, 0.165187060832977 }, { 3, 0.140990674495697 }, { 4, 0.140984714031219 }, { 5, 0.141018092632294 }, }), new(0, new Dictionary { { 0, 0.216337636113167 }, { 1, 0.186360627412796 }, { 2, 0.161528438329697 }, { 3, 0.160016357898712 }, { 4, 0.137862130999565 }, { 5, 0.13789476454258 }, }), new(3, new Dictionary { { 0, 0.149128466844559 }, { 1, 0.163378983736038 }, { 2, 0.148956805467606 }, { 3, 0.206476286053658 }, { 4, 0.148523956537247 }, { 5, 0.183535546064377 }, }), new(3, new Dictionary { { 0, 0.156232386827469 }, { 1, 0.167187243700027 }, { 2, 0.156052529811859 }, { 3, 0.209292829036713 }, { 4, 0.15559908747673 }, { 5, 0.15563590824604 }, }), new(3, new 
Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary { { 0, 0.14455483853817 }, { 1, 0.154690876603127 }, { 2, 0.144388437271118 }, { 3, 0.229516208171844 }, { 4, 0.143968880176544 }, { 5, 0.182880714535713 }, }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 }, }), new(3, new Dictionary { { 0, 0.153719380497932 }, { 1, 0.164498031139374 }, { 2, 0.153542414307594 }, { 3, 0.209774866700172 }, { 4, 0.153096258640289 }, { 5, 0.165369004011154 }, }), new(3, new Dictionary { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 }, }), new(3, new Dictionary { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 }, }), new(3, new Dictionary { { 0, 0.152724325656891 }, { 1, 0.160508275032043 }, { 2, 0.149818390607834 }, { 3, 0.238147541880608 }, { 4, 0.149383053183556 }, { 5, 0.14941842854023 }, }), new(1, new Dictionary { { 0, 0.152712091803551 }, { 1, 0.20559786260128 }, { 2, 0.152536302804947 }, { 3, 0.176534190773964 }, { 4, 0.152093067765236 }, { 5, 0.160526528954506 }, }), new(3, new Dictionary { { 0, 0.144403666257858 }, { 1, 0.194412112236023 }, { 2, 0.14423742890358 }, { 3, 0.229276165366173 }, { 4, 0.14381830394268 }, { 5, 0.143852367997169 }, 
}), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary { { 0, 0.152389481663704 }, { 1, 0.160156399011612 }, { 2, 0.149489924311638 }, { 3, 0.149061858654022 }, { 4, 0.23981149494648 }, { 5, 0.149090841412544 }, }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary { { 0, 0.149491384625435 }, { 1, 0.163839146494865 }, { 2, 0.149319306015968 }, { 3, 0.148891717195511 }, { 4, 0.23953777551651 }, { 5, 0.148920670151711 }, }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 }, }), new(4, new Dictionary { { 0, 0.142711848020554 }, { 1, 0.152718678116798 }, { 2, 0.142547562718391 }, { 3, 0.191180393099785 }, { 4, 0.228674560785294 }, { 5, 0.142167016863823 }, }), new(4, new Dictionary { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 }, }), new(4, new Dictionary { { 0, 0.151802018284798 }, { 1, 0.163394033908844 }, { 2, 0.148913636803627 }, { 3, 0.148487210273743 }, { 4, 0.238887012004852 }, { 5, 0.148516088724136 }, }), new(5, new Dictionary { { 0, 0.14544840157032 }, { 1, 0.159347251057625 }, { 2, 0.145280972123146 }, { 3, 0.144864946603775 }, { 4, 0.144858822226524 }, { 5, 0.26019960641861 }, }), new(5, new Dictionary { { 0, 0.158301413059235 }, { 1, 0.17342846095562 }, { 2, 0.158119171857834 }, { 3, 0.157666385173798 }, { 4, 0.157659709453583 }, { 5, 
0.194824859499931 }, }), new(0, new Dictionary { { 0, 0.219101145863533 }, { 1, 0.15002153813839 }, { 2, 0.200809195637703 }, { 3, 0.139629080891609 }, { 4, 0.139623180031776 }, { 5, 0.15081587433815 }, }), new(5, new Dictionary { { 0, 0.158076956868172 }, { 1, 0.169161155819893 }, { 2, 0.157894983887672 }, { 3, 0.157442837953568 }, { 4, 0.157436162233353 }, { 5, 0.199987947940826 }, }), new(5, new Dictionary { { 0, 0.161370471119881 }, { 1, 0.16959510743618 }, { 2, 0.158300027251244 }, { 3, 0.157846719026566 }, { 4, 0.157840043306351 }, { 5, 0.195047691464424 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 
}, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.160495221614838 }, { 1, 0.168675243854523 }, { 2, 0.157441437244415 }, { 3, 0.156990587711334 }, { 4, 0.156983941793442 }, { 5, 0.199413493275642 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145015180110931 }, { 1, 0.158933326601982 }, { 2, 0.144848257303238 }, { 3, 0.144433453679085 }, { 4, 0.147345185279846 }, { 5, 0.259424567222595 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.146661177277565 }, { 1, 0.152337715029716 }, { 2, 0.1464923620224 }, { 3, 0.146072864532471 }, { 4, 0.146066680550575 }, { 5, 0.262369185686111 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 
0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.138151779770851 }, { 1, 0.147838860750198 }, { 2, 0.13799275457859 }, { 3, 0.191278502345085 }, { 4, 0.137591779232025 }, { 5, 0.24714632332325 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 }, }), }; + var expected = new ProbabilityPrediction[] { new(0, new Dictionary { { 0, 0.204421937465668 }, { 1, 0.165088519454002 }, { 2, 0.169509157538414 }, { 3, 0.15365232527256 }, { 4, 0.15364582836628 }, { 5, 0.153682202100754 } }), new(1, new Dictionary { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 } }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 } }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 
2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 } }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 } }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 } }), new(1, new Dictionary { { 0, 0.212972953915596 }, { 1, 0.230104967951775 }, { 2, 0.149730339646339 }, { 3, 0.13572371006012 }, { 4, 0.135717958211899 }, { 5, 0.135750100016594 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(1, new Dictionary { { 0, 0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 0.141705080866814 }, { 5, 0.141738638281822 } }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 
0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(1, new Dictionary { { 0, 0.171550869941711 }, { 1, 0.17407751083374 }, { 2, 0.168291121721268 }, { 3, 0.162018626928329 }, { 4, 0.162011757493019 }, { 5, 0.162050127983093 } }), new(0, new Dictionary { { 0, 0.200587809085846 }, { 1, 0.200458511710167 }, { 2, 0.1540387570858 }, { 3, 0.1482974588871 }, { 4, 0.148291185498238 }, { 5, 0.14832629263401 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 
0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.209190428256989 }, { 1, 0.166168510913849 }, { 2, 0.160645022988319 }, { 3, 0.154657498002052 }, { 4, 0.154650956392288 }, { 5, 0.154687568545341 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 } }), new(0, new Dictionary { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 
0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.231333449482918 }, { 1, 0.158397167921066 }, { 2, 0.16797336935997 }, { 3, 0.147424504160881 }, { 4, 0.147418275475502 }, { 5, 0.147453173995018 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 } }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 } }), new(0, new Dictionary { { 0, 0.222247675061226 }, { 1, 0.191451728343964 }, { 2, 0.161376118659973 }, { 3, 0.141634315252304 }, { 4, 0.141628324985504 }, { 5, 0.14166185259819 } }), new(1, new Dictionary { { 0, 0.211069479584694 }, { 1, 0.217032581567764 }, { 2, 0.157594978809357 }, { 3, 0.134510651230812 }, { 4, 0.134504958987236 }, { 5, 0.145287364721298 } }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 } }), new(0, new Dictionary { { 0, 0.221573829650879 }, { 1, 
0.151714622974396 }, { 2, 0.20307545363903 }, { 3, 0.141204878687859 }, { 4, 0.141198918223381 }, { 5, 0.141232341527939 } }), new(0, new Dictionary { { 0, 0.218772485852242 }, { 1, 0.18845808506012 }, { 2, 0.163346409797668 }, { 3, 0.13941964507103 }, { 4, 0.139413744211197 }, { 5, 0.150589644908905 } }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 } }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 } }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 } }), new(0, new Dictionary { { 0, 0.236084789037704 }, { 1, 0.161650478839874 }, { 2, 0.15088452398777 }, { 3, 0.150452449917793 }, { 4, 0.150446087121964 }, { 5, 0.150481700897217 } }), new(0, new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 } }), new(1, new Dictionary { { 0, 0.196985200047493 }, { 1, 0.201361879706383 }, { 2, 0.154732927680016 }, { 3, 0.148965761065483 }, { 4, 0.148959457874298 }, { 5, 0.148994728922844 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 } }), new(0, new Dictionary { { 0, 0.233522072434425 }, { 1, 0.165416672825813 }, { 2, 0.154580771923065 }, { 3, 0.148819282650948 }, { 4, 0.148812994360924 }, { 5, 0.148848220705986 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, 
{ 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(0, new Dictionary { { 0, 0.22473056614399 }, { 1, 0.153876096010208 }, { 2, 0.157996505498886 }, { 3, 0.143216624855995 }, { 4, 0.143210560083389 }, { 5, 0.176969602704048 } }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 } }), new(0, new Dictionary { { 0, 0.228362649679184 }, { 1, 0.156363025307655 }, { 2, 0.170506909489632 }, { 3, 0.14553128182888 }, { 4, 0.153676524758339 }, { 5, 0.145559579133987 } }), new(0, new Dictionary { { 0, 0.232576459646225 }, { 1, 0.159248277544975 }, { 2, 0.16350269317627 }, { 3, 0.148216664791107 }, { 4, 0.148210391402245 }, { 5, 0.148245483636856 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 } }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 } }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 } }), new(0, new Dictionary { { 0, 0.231693357229233 }, { 1, 0.15864360332489 }, { 2, 0.162881851196289 }, { 3, 0.151451021432877 }, { 4, 0.147647619247437 }, { 5, 0.147682577371597 } }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 } }), new(1, new Dictionary { { 0, 0.18250384926796 
}, { 1, 0.232570126652718 }, { 2, 0.151334419846535 }, { 3, 0.15921525657177 }, { 4, 0.137171939015388 }, { 5, 0.137204423546791 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.185695022344589 }, { 1, 0.238080278038979 }, { 2, 0.154919907450676 }, { 3, 0.140427812933922 }, { 4, 0.140421867370605 }, { 5, 0.14045512676239 } }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 } }), new(1, new Dictionary { { 0, 
0.187391951680183 }, { 1, 0.240255907177925 }, { 2, 0.147197380661964 }, { 3, 0.141711086034775 }, { 4, 0.141705080866814 }, { 5, 0.141738638281822 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.161344453692436 }, { 1, 0.186508595943451 }, { 2, 0.161158725619316 }, { 3, 0.160697221755981 }, { 4, 0.160690426826477 }, { 5, 0.169600605964661 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 } }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 } }), new(1, new Dictionary { { 0, 0.149003386497498 }, { 1, 0.251605868339539 }, { 2, 0.154151156544685 }, { 3, 0.14840567111969 }, { 4, 0.148399382829666 }, { 5, 0.148434519767761 } }), new(1, new Dictionary { { 0, 0.154577597975731 }, { 1, 0.20810940861702 }, { 2, 0.175417006015778 }, { 3, 0.153957515954971 }, { 4, 0.15395100414753 }, { 5, 0.15398745238781 } }), new(1, new 
Dictionary { { 0, 0.14681002497673 }, { 1, 0.247902169823647 }, { 2, 0.166602239012718 }, { 3, 0.146221116185188 }, { 4, 0.146214917302132 }, { 5, 0.146249532699585 } }), new(1, new Dictionary { { 0, 0.149800211191177 }, { 1, 0.252951383590698 }, { 2, 0.149627774953842 }, { 3, 0.149199306964874 }, { 4, 0.149192988872528 }, { 5, 0.149228319525719 } }), new(1, new Dictionary { { 0, 0.164128586649895 }, { 1, 0.175637125968933 }, { 2, 0.169798880815506 }, { 3, 0.163470193743706 }, { 4, 0.163463264703751 }, { 5, 0.163501977920532 } }), new(0, new Dictionary { { 0, 0.223394736647606 }, { 1, 0.192439839243889 }, { 2, 0.157047867774963 }, { 3, 0.142365306615829 }, { 4, 0.142359286546707 }, { 5, 0.142392992973328 } }), new(0, new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 } }), new(1, new Dictionary { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 } }), new(1, new Dictionary { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 } }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 } }), new(1, new Dictionary { { 0, 0.165095925331116 }, { 1, 0.176672294735909 }, { 2, 0.164905861020088 }, { 3, 0.164433643221855 }, { 4, 0.164426684379578 }, { 5, 0.164465621113777 } }), new(0, new Dictionary { { 0, 0.226629555225372 }, { 1, 0.195226430892944 }, { 2, 0.144841581583023 }, { 3, 0.144426807761192 }, { 4, 0.144420698285103 }, { 5, 0.144454896450043 } }), new(1, new Dictionary { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 } }), 
new(1, new Dictionary { { 0, 0.215209871530533 }, { 1, 0.221289947628975 }, { 2, 0.137543112039566 }, { 3, 0.140676274895668 }, { 4, 0.137143447995186 }, { 5, 0.148137360811234 } }), new(1, new Dictionary { { 0, 0.142483577132225 }, { 1, 0.228974625468254 }, { 2, 0.142319530248642 }, { 3, 0.164056032896042 }, { 4, 0.14190599322319 }, { 5, 0.18026028573513 } }), new(1, new Dictionary { { 0, 0.143185585737228 }, { 1, 0.230102762579918 }, { 2, 0.143020734190941 }, { 3, 0.16486431658268 }, { 4, 0.142605155706406 }, { 5, 0.176221489906311 } }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 } }), new(1, new Dictionary { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 } }), new(4, new Dictionary { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 } }), new(1, new Dictionary { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 } }), new(1, new Dictionary { { 0, 0.150330767035484 }, { 1, 0.237261682748795 }, { 2, 0.147470369935036 }, { 3, 0.150829672813416 }, { 4, 0.155278235673904 }, { 5, 0.158829256892204 } }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 } }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 } }), new(0, new Dictionary { { 0, 0.206467673182487 }, { 1, 0.166740626096725 }, { 2, 0.161198109388351 }, { 3, 0.155189976096153 }, { 4, 0.155183419585228 }, { 5, 0.155220150947571 
} }), new(1, new Dictionary { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 } }), new(1, new Dictionary { { 0, 0.188319802284241 }, { 1, 0.239981546998024 }, { 2, 0.147029295563698 }, { 3, 0.14154925942421 }, { 4, 0.14154326915741 }, { 5, 0.141576781868935 } }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(0, new Dictionary { { 0, 0.234818488359451 }, { 1, 0.160783424973488 }, { 2, 0.15543894469738 }, { 3, 0.149645462632179 }, { 4, 0.149639129638672 }, { 5, 0.149674564599991 } }), new(1, new Dictionary { { 0, 0.164174765348434 }, { 1, 0.209589347243309 }, { 2, 0.16105517745018 }, { 3, 0.155052363872528 }, { 4, 0.155045807361603 }, { 5, 
0.155082508921623 } }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 } }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 } }), new(1, new Dictionary { { 0, 0.157896161079407 }, { 1, 0.21257720887661 }, { 2, 0.157714396715164 }, { 3, 0.157262772321701 }, { 4, 0.157256111502647 }, { 5, 0.15729333460331 } }), new(3, new Dictionary { { 0, 0.149210333824158 }, { 1, 0.200883388519287 }, { 2, 0.149038568139076 }, { 3, 0.203621536493301 }, { 4, 0.148605495691299 }, { 5, 0.148640677332878 } }), new(1, new Dictionary { { 0, 0.166896566748619 }, { 1, 0.175402864813805 }, { 2, 0.163720980286598 }, { 3, 0.167450442910194 }, { 4, 0.163245245814323 }, { 5, 0.16328389942646 } }), new(1, new Dictionary { { 0, 0.151579231023788 }, { 1, 0.239232078194618 }, { 2, 0.148695081472397 }, { 3, 0.152082279324532 }, { 4, 0.14826300740242 }, { 5, 0.160148292779922 } }), new(0, new Dictionary { { 0, 0.205285787582397 }, { 1, 0.171510457992554 }, { 2, 0.160275369882584 }, { 3, 0.154301628470421 }, { 4, 0.154295101761818 }, { 5, 0.154331624507904 } }), new(1, new Dictionary { { 0, 0.154403537511826 }, { 1, 0.247229039669037 }, { 2, 0.160873055458069 }, { 3, 0.145824074745178 }, { 4, 0.145817905664444 }, { 5, 0.14585243165493 } }), new(1, new Dictionary { { 0, 0.214853376150131 }, { 1, 0.232136651873589 }, { 2, 0.142222970724106 }, { 3, 0.13692207634449 }, { 4, 0.136916279792786 }, { 5, 0.136948689818382 } }), new(1, new Dictionary { { 0, 0.19794899225235 }, { 1, 0.201120212674141 }, { 2, 0.154547214508057 }, { 3, 0.148786976933479 }, { 4, 0.148780673742294 }, { 5, 0.148815900087357 } }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 
5, 0.14371095597744 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.190860792994499 }, { 1, 0.239230290055275 }, { 2, 0.146569013595581 }, { 3, 0.141106128692627 }, { 4, 0.141100153326988 }, { 5, 0.141133576631546 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(0, new Dictionary { { 0, 0.225462421774864 }, { 1, 0.194221019744873 }, { 2, 0.149245649576187 }, { 3, 0.143683016300201 }, { 4, 0.143676936626434 }, { 5, 0.14371095597744 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.155869245529175 }, { 1, 0.249575912952423 }, { 2, 0.152907475829124 }, { 3, 0.147208347916603 }, { 4, 0.147202104330063 }, { 5, 0.147236958146095 } }), new(1, new Dictionary { { 0, 0.198042631149292 }, { 1, 0.202442809939384 }, { 2, 0.150195524096489 }, { 3, 0.14976541697979 }, { 4, 0.149759083986282 }, { 5, 0.149794533848763 } }), new(0, new Dictionary { { 0, 0.220375582575798 }, { 1, 0.189839035272598 }, { 2, 0.145878404378891 }, { 3, 0.16300305724144 }, { 4, 0.140435323119164 }, { 5, 0.140468567609787 } }), new(0, new Dictionary { { 0, 0.232574164867401 }, { 1, 0.15924671292305 }, { 2, 0.16351093351841 }, { 3, 0.14821520447731 }, { 4, 0.148208931088448 }, { 5, 0.148244023323059 } }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 
}, { 5, 0.15541473031044 } }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 } }), new(0, new Dictionary { { 0, 0.205473050475121 }, { 1, 0.16694962978363 }, { 2, 0.161400184035301 }, { 3, 0.155384510755539 }, { 4, 0.155377939343452 }, { 5, 0.15541473031044 } }), new(1, new Dictionary { { 0, 0.157011136412621 }, { 1, 0.211385697126389 }, { 2, 0.162435546517372 }, { 3, 0.156381294131279 }, { 4, 0.156374678015709 }, { 5, 0.156411692500114 } }), new(0, new Dictionary { { 0, 0.200586974620819 }, { 1, 0.161991447210312 }, { 2, 0.176644444465637 }, { 3, 0.150769785046577 }, { 4, 0.159208223223686 }, { 5, 0.150799110531807 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(2, new Dictionary { { 0, 0.192420691251755 }, { 1, 0.161742717027664 }, { 2, 0.209272593259811 }, { 3, 0.145513951778412 }, { 4, 0.145507797598839 }, { 5, 0.145542249083519 } }), new(0, new Dictionary { { 0, 0.203436717391014 }, { 1, 0.165295079350471 }, { 2, 0.169711023569107 }, { 3, 0.153844580054283 }, { 4, 0.153838068246841 }, { 5, 0.15387450158596 } }), new(0, new Dictionary { { 0, 0.202308386564255 }, { 1, 0.164378300309181 }, { 2, 0.174316108226776 }, { 3, 0.152991309762001 }, { 4, 0.152984827756882 }, { 5, 0.153021052479744 } }), new(2, new Dictionary { { 0, 0.193465068936348 }, { 1, 0.15719299018383 }, { 2, 0.210408434271812 }, { 3, 0.146303743124008 }, { 4, 0.146297559142113 }, { 5, 0.146332189440727 } }), new(0, new Dictionary { { 0, 0.230239436030388 }, { 1, 0.157648086547852 }, { 2, 0.17190819978714 }, { 3, 0.146727308630943 }, { 4, 0.146721109747887 }, { 5, 0.14675584435463 } }), new(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 
0.153443738818169 }, { 5, 0.15348008275032 } }), new(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 } }), new(2, new Dictionary { { 0, 0.154068276286125 }, { 1, 0.164871394634247 }, { 2, 0.220686241984367 }, { 3, 0.153450235724449 }, { 4, 0.153443738818169 }, { 5, 0.15348008275032 } }), new(0, new Dictionary { { 0, 0.221237704157829 }, { 1, 0.190581694245338 }, { 2, 0.165187060832977 }, { 3, 0.140990674495697 }, { 4, 0.140984714031219 }, { 5, 0.141018092632294 } }), new(0, new Dictionary { { 0, 0.216337636113167 }, { 1, 0.186360627412796 }, { 2, 0.161528438329697 }, { 3, 0.160016357898712 }, { 4, 0.137862130999565 }, { 5, 0.13789476454258 } }), new(3, new Dictionary { { 0, 0.149128466844559 }, { 1, 0.163378983736038 }, { 2, 0.148956805467606 }, { 3, 0.206476286053658 }, { 4, 0.148523956537247 }, { 5, 0.183535546064377 } }), new(3, new Dictionary { { 0, 0.156232386827469 }, { 1, 0.167187243700027 }, { 2, 0.156052529811859 }, { 3, 0.209292829036713 }, { 4, 0.15559908747673 }, { 5, 0.15563590824604 } }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 } }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 } }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 }, { 4, 0.149792477488518 }, { 5, 0.149827942252159 } }), new(3, new Dictionary { { 0, 0.14455483853817 }, { 1, 0.154690876603127 }, { 2, 0.144388437271118 }, { 3, 0.229516208171844 }, { 4, 0.143968880176544 }, { 5, 0.182880714535713 } }), new(3, new Dictionary { { 0, 0.150402143597603 }, { 1, 0.160948187112808 }, { 2, 0.150229007005692 }, { 3, 0.238800227642059 
}, { 4, 0.149792477488518 }, { 5, 0.149827942252159 } }), new(3, new Dictionary { { 0, 0.153719380497932 }, { 1, 0.164498031139374 }, { 2, 0.153542414307594 }, { 3, 0.209774866700172 }, { 4, 0.153096258640289 }, { 5, 0.165369004011154 } }), new(3, new Dictionary { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 } }), new(3, new Dictionary { { 0, 0.154578030109406 }, { 1, 0.160560995340347 }, { 2, 0.15440009534359 }, { 3, 0.214021503925323 }, { 4, 0.153951436281204 }, { 5, 0.16248793900013 } }), new(3, new Dictionary { { 0, 0.152724325656891 }, { 1, 0.160508275032043 }, { 2, 0.149818390607834 }, { 3, 0.238147541880608 }, { 4, 0.149383053183556 }, { 5, 0.14941842854023 } }), new(1, new Dictionary { { 0, 0.152712091803551 }, { 1, 0.20559786260128 }, { 2, 0.152536302804947 }, { 3, 0.176534190773964 }, { 4, 0.152093067765236 }, { 5, 0.160526528954506 } }), new(3, new Dictionary { { 0, 0.144403666257858 }, { 1, 0.194412112236023 }, { 2, 0.14423742890358 }, { 3, 0.229276165366173 }, { 4, 0.14381830394268 }, { 5, 0.143852367997169 } }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 } }), new(4, new Dictionary { { 0, 0.152389481663704 }, { 1, 0.160156399011612 }, { 2, 0.149489924311638 }, { 3, 0.149061858654022 }, { 4, 0.23981149494648 }, { 5, 0.149090841412544 } }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 } }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 } }), new(4, new Dictionary { { 0, 0.149491384625435 }, { 1, 0.163839146494865 }, { 2, 0.149319306015968 }, { 3, 
0.148891717195511 }, { 4, 0.23953777551651 }, { 5, 0.148920670151711 } }), new(4, new Dictionary { { 0, 0.150071501731873 }, { 1, 0.160594373941422 }, { 2, 0.149898752570152 }, { 3, 0.149469509720802 }, { 4, 0.240467309951782 }, { 5, 0.14949856698513 } }), new(4, new Dictionary { { 0, 0.142711848020554 }, { 1, 0.152718678116798 }, { 2, 0.142547562718391 }, { 3, 0.191180393099785 }, { 4, 0.228674560785294 }, { 5, 0.142167016863823 } }), new(4, new Dictionary { { 0, 0.151807546615601 }, { 1, 0.159544795751572 }, { 2, 0.148919060826302 }, { 3, 0.152311354875565 }, { 4, 0.23889571428299 }, { 5, 0.148521512746811 } }), new(4, new Dictionary { { 0, 0.151802018284798 }, { 1, 0.163394033908844 }, { 2, 0.148913636803627 }, { 3, 0.148487210273743 }, { 4, 0.238887012004852 }, { 5, 0.148516088724136 } }), new(5, new Dictionary { { 0, 0.14544840157032 }, { 1, 0.159347251057625 }, { 2, 0.145280972123146 }, { 3, 0.144864946603775 }, { 4, 0.144858822226524 }, { 5, 0.26019960641861 } }), new(5, new Dictionary { { 0, 0.158301413059235 }, { 1, 0.17342846095562 }, { 2, 0.158119171857834 }, { 3, 0.157666385173798 }, { 4, 0.157659709453583 }, { 5, 0.194824859499931 } }), new(0, new Dictionary { { 0, 0.219101145863533 }, { 1, 0.15002153813839 }, { 2, 0.200809195637703 }, { 3, 0.139629080891609 }, { 4, 0.139623180031776 }, { 5, 0.15081587433815 } }), new(5, new Dictionary { { 0, 0.158076956868172 }, { 1, 0.169161155819893 }, { 2, 0.157894983887672 }, { 3, 0.157442837953568 }, { 4, 0.157436162233353 }, { 5, 0.199987947940826 } }), new(5, new Dictionary { { 0, 0.161370471119881 }, { 1, 0.16959510743618 }, { 2, 0.158300027251244 }, { 3, 0.157846719026566 }, { 4, 0.157840043306351 }, { 5, 0.195047691464424 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 
3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 } }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 } }), new(5, new Dictionary { { 0, 0.145988583564758 }, { 1, 0.15622515976429 }, { 2, 0.145820543169975 }, { 3, 0.145402953028679 }, { 4, 0.145396813750267 }, { 5, 0.261165946722031 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.160495221614838 }, { 1, 0.168675243854523 }, { 2, 0.157441437244415 }, { 3, 0.156990587711334 }, { 4, 0.156983941793442 }, { 5, 0.199413493275642 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 
3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145015180110931 }, { 1, 0.158933326601982 }, { 2, 0.144848257303238 }, { 3, 0.144433453679085 }, { 4, 0.147345185279846 }, { 5, 0.259424567222595 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.146661177277565 }, { 1, 0.152337715029716 }, { 2, 0.1464923620224 }, { 3, 0.146072864532471 }, { 4, 0.146066680550575 }, { 5, 0.262369185686111 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.138151779770851 }, { 1, 0.147838860750198 }, { 2, 0.13799275457859 }, { 3, 0.191278502345085 }, { 4, 0.137591779232025 }, { 5, 0.24714632332325 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }), new(5, new Dictionary { { 0, 0.145561009645462 }, { 1, 0.155767604708672 }, { 2, 0.145393446087837 }, { 3, 
0.14497709274292 }, { 4, 0.14789979159832 }, { 5, 0.260401040315628 } }) }; CollectionAssert.AreEqual(expected, actual); } diff --git a/src/SharpLearning.XGBoost.Test/Models/FeatureImportanceParserTest.cs b/src/SharpLearning.XGBoost.Test/Models/FeatureImportanceParserTest.cs index 913beae1..a22f5859 100644 --- a/src/SharpLearning.XGBoost.Test/Models/FeatureImportanceParserTest.cs +++ b/src/SharpLearning.XGBoost.Test/Models/FeatureImportanceParserTest.cs @@ -23,7 +23,7 @@ public void FeatureImportanceParser_Parse() 43.939899999999994, 134.3104, 0, - 19.014670000000002 + 19.014670000000002, }; Assert.AreEqual(expected.Length, actual.Length); @@ -34,9 +34,9 @@ public void FeatureImportanceParser_Parse() } } - readonly string[] m_textTrees = [m_tree1, m_tree2]; + readonly string[] m_textTrees = [Tree1, Tree2]; - const string m_tree1 = @"booster[0] + const string Tree1 = @"booster[0] 0:[f2<2.695] yes=1,no=2,missing=1,gain=343.922,cover=214 1:[f6<9.81] yes=3,no=4,missing=3,gain=74.1261,cover=61 3:[f8<0.13] yes=7,no=8,missing=7,gain=10.7401,cover=37 @@ -51,8 +51,7 @@ public void FeatureImportanceParser_Parse() 12:leaf=-0.0020202,cover=98 6:leaf=0.3,cover=2"; - - const string m_tree2 = @"booster[1] + const string Tree2 = @"booster[1] 0:[f2<2.695] yes=1,no=2,missing=1,gain=280.77,cover=214 1:[f6<9.81] yes=3,no=4,missing=3,gain=60.1843,cover=61 3:[f8<0.13] yes=7,no=8,missing=7,gain=8.27457,cover=37 diff --git a/src/SharpLearning.XGBoost.Test/Models/XGBoostTreeConverterTest.cs b/src/SharpLearning.XGBoost.Test/Models/XGBoostTreeConverterTest.cs index 33e6f456..b6c131ed 100644 --- a/src/SharpLearning.XGBoost.Test/Models/XGBoostTreeConverterTest.cs +++ b/src/SharpLearning.XGBoost.Test/Models/XGBoostTreeConverterTest.cs @@ -42,7 +42,6 @@ public void XGBoostTreeConverter_ConvertXGBoostTextTreeToGBMTree_Tree_2() } } - [TestMethod] public void XGBoostTreeConverter_IsLeaf() { @@ -152,7 +151,7 @@ void AssertGBMNode(GBMNode expected, GBMNode actual) LeftConstant = -1, RightConstant = 
-1, LeftIndex = 2, - RightIndex = 3 + RightIndex = 3, }, /*1*/ @@ -163,7 +162,7 @@ void AssertGBMNode(GBMNode expected, GBMNode actual) LeftConstant = -1, RightConstant = -1, LeftIndex = 4, - RightIndex = 5 + RightIndex = 5, }, /*2*/ @@ -174,7 +173,7 @@ void AssertGBMNode(GBMNode expected, GBMNode actual) LeftConstant = -1, RightConstant = 0.3, LeftIndex = 6, - RightIndex = -1 + RightIndex = -1, }, /*3*/ @@ -185,7 +184,7 @@ void AssertGBMNode(GBMNode expected, GBMNode actual) LeftConstant = 0.404167, RightConstant = 0.1, LeftIndex = -1, - RightIndex = -1 + RightIndex = -1, }, /*4*/ @@ -196,7 +195,7 @@ void AssertGBMNode(GBMNode expected, GBMNode actual) LeftConstant = 0.0444444, RightConstant = 0.205882, LeftIndex = -1, - RightIndex = -1 + RightIndex = -1, }, /*5*/ @@ -207,7 +206,7 @@ void AssertGBMNode(GBMNode expected, GBMNode actual) LeftConstant = 0.0527778, RightConstant = -0.0020202, LeftIndex = -1, - RightIndex = -1 + RightIndex = -1, }, ]; @@ -244,7 +243,7 @@ void AssertGBMNode(GBMNode expected, GBMNode actual) LeftConstant = -1, RightConstant = -1, LeftIndex = 2, - RightIndex = 3 + RightIndex = 3, }, /*1*/ @@ -255,7 +254,7 @@ void AssertGBMNode(GBMNode expected, GBMNode actual) LeftConstant = -1, RightConstant = -1, LeftIndex = 4, - RightIndex = 5 + RightIndex = 5, }, /*2*/ @@ -266,7 +265,7 @@ void AssertGBMNode(GBMNode expected, GBMNode actual) LeftConstant = 0.140797, RightConstant = -1, LeftIndex = -1, - RightIndex = 6 + RightIndex = 6, }, /*3*/ @@ -277,7 +276,7 @@ void AssertGBMNode(GBMNode expected, GBMNode actual) LeftConstant = 0.0747358, RightConstant = -0.0296133, LeftIndex = -1, - RightIndex = -1 + RightIndex = -1, }, /*4*/ @@ -288,7 +287,7 @@ void AssertGBMNode(GBMNode expected, GBMNode actual) LeftConstant = 0.154654, RightConstant = -0.0200209, LeftIndex = -1, - RightIndex = -1 + RightIndex = -1, }, /*5*/ @@ -299,7 +298,7 @@ void AssertGBMNode(GBMNode expected, GBMNode actual) LeftConstant = 0.0257847, RightConstant = -0.00524031, LeftIndex = 
-1, - RightIndex = -1 + RightIndex = -1, }, ]; } diff --git a/src/SharpLearning.XGBoost/BoosterType.cs b/src/SharpLearning.XGBoost/BoosterType.cs index c86814ab..333e9d23 100644 --- a/src/SharpLearning.XGBoost/BoosterType.cs +++ b/src/SharpLearning.XGBoost/BoosterType.cs @@ -19,5 +19,5 @@ public enum BoosterType /// DART: Dropouts meet Multiple Additive Regression Trees. /// http://xgboost.readthedocs.io/en/latest/tutorials/dart.html /// - DART + DART, } diff --git a/src/SharpLearning.XGBoost/BoosterTypeExtensions.cs b/src/SharpLearning.XGBoost/BoosterTypeExtensions.cs index 227bbeca..c0114fc2 100644 --- a/src/SharpLearning.XGBoost/BoosterTypeExtensions.cs +++ b/src/SharpLearning.XGBoost/BoosterTypeExtensions.cs @@ -3,7 +3,7 @@ namespace SharpLearning.XGBoost; /// -/// +/// /// public static class BoosterTypeExtensions { diff --git a/src/SharpLearning.XGBoost/ClassificationObjective.cs b/src/SharpLearning.XGBoost/ClassificationObjective.cs index c10e3f42..a7281338 100644 --- a/src/SharpLearning.XGBoost/ClassificationObjective.cs +++ b/src/SharpLearning.XGBoost/ClassificationObjective.cs @@ -17,14 +17,14 @@ public enum ClassificationObjective /// /// GPU version of binary logistic regression evaluated on the GPU, - /// note that like the GPU histogram algorithm, + /// note that like the GPU histogram algorithm, /// they can only be used when the entire training session uses the same dataset. /// GPUBinaryLogistic, /// /// GPU version of binary logistic regression raw evaluated on the GPU, - /// note that like the GPU histogram algorithm, + /// note that like the GPU histogram algorithm, /// they can only be used when the entire training session uses the same dataset. 
/// GPUBinaryLogisticRaw, @@ -35,7 +35,7 @@ public enum ClassificationObjective Softmax, /// - /// same as softmax, but output a vector of ndata * nclass, which can be further reshaped to ndata, + /// same as softmax, but output a vector of ndata * nclass, which can be further reshaped to ndata, /// nclass matrix.The result contains predicted probability of each data point belonging to each class. /// SoftProb, diff --git a/src/SharpLearning.XGBoost/Learners/ClassificationXGBoostLearner.cs b/src/SharpLearning.XGBoost/Learners/ClassificationXGBoostLearner.cs index ed2cdde3..636ef20a 100644 --- a/src/SharpLearning.XGBoost/Learners/ClassificationXGBoostLearner.cs +++ b/src/SharpLearning.XGBoost/Learners/ClassificationXGBoostLearner.cs @@ -20,7 +20,7 @@ public sealed class ClassificationXGBoostLearner readonly IDictionary m_parameters = new Dictionary(); /// - /// Classification learner for XGBoost. For classification problems, + /// Classification learner for XGBoost. For classification problems, /// XGBoost requires that target values are sequential and start at 0. /// /// Maximum tree depth for base learners. (default is 3) @@ -28,7 +28,7 @@ public sealed class ClassificationXGBoostLearner /// Number of estimators to fit. (default is 100) /// Whether to print messages while running boosting. (default is false) /// Specify the learning task and the corresponding learning objective. (default is softmax) - /// which booster to use, can be gbtree, gblinear or dart. + /// which booster to use, can be gbtree, gblinear or dart. /// gbtree and dart use tree based model while gblinear uses linear function (default is gbtree) /// The tree construction algorithm used in XGBoost. See reference paper: https://arxiv.org/abs/1603.02754. (default is auto) /// Type of sampling algorithm for DART. 
(default is uniform) @@ -107,7 +107,7 @@ public ClassificationXGBoostLearner(int maximumTreeDepth = 3, objective = ClassificationObjective.SoftProb; } - m_parameters[ParameterNames.objective] = objective.ToXGBoostString(); + m_parameters[ParameterNames.Objective] = objective.ToXGBoostString(); m_parameters[ParameterNames.Threads] = numberOfThreads; m_parameters[ParameterNames.Gamma] = (float)gamma; @@ -162,7 +162,7 @@ public ClassificationXGBoostModel Learn(F64Matrix observations, double[] targets var floatTargets = targets.ToFloat(indices); // Only specify XGBoost number of classes if the objective is multi-class. - var objective = (string)m_parameters[ParameterNames.objective]; + var objective = (string)m_parameters[ParameterNames.Objective]; if (objective == ClassificationObjective.Softmax.ToXGBoostString() || objective == ClassificationObjective.SoftProb.ToXGBoostString()) { diff --git a/src/SharpLearning.XGBoost/Learners/RegressionXGBoostLearner.cs b/src/SharpLearning.XGBoost/Learners/RegressionXGBoostLearner.cs index fa263ee8..bedfa29c 100644 --- a/src/SharpLearning.XGBoost/Learners/RegressionXGBoostLearner.cs +++ b/src/SharpLearning.XGBoost/Learners/RegressionXGBoostLearner.cs @@ -23,7 +23,7 @@ public sealed class RegressionXGBoostLearner : ILearner, IIndexedLearner /// Number of estimators to fit. (default is 100) /// Whether to print messages while running boosting. (default is false) /// Specify the learning task and the corresponding learning objective. (default is LinearRegression) - /// which booster to use, can be gbtree, gblinear or dart. + /// which booster to use, can be gbtree, gblinear or dart. /// gbtree and dart use tree based model while gblinear uses linear function (default is gbtree) /// The tree construction algorithm used in XGBoost. See reference paper: https://arxiv.org/abs/1603.02754. (default is auto) /// Type of sampling algorithm for DART. 
(default is uniform) @@ -92,7 +92,7 @@ public RegressionXGBoostLearner( m_parameters[ParameterNames.LearningRate] = (float)learningRate; m_parameters[ParameterNames.Estimators] = estimators; m_parameters[ParameterNames.Silent] = silent; - m_parameters[ParameterNames.objective] = objective.ToXGBoostString(); + m_parameters[ParameterNames.Objective] = objective.ToXGBoostString(); m_parameters[ParameterNames.Threads] = numberOfThreads; m_parameters[ParameterNames.Gamma] = (float)gamma; diff --git a/src/SharpLearning.XGBoost/Models/ClassificationXGBoostModel.cs b/src/SharpLearning.XGBoost/Models/ClassificationXGBoostModel.cs index b4d7a4a2..3d53bcec 100644 --- a/src/SharpLearning.XGBoost/Models/ClassificationXGBoostModel.cs +++ b/src/SharpLearning.XGBoost/Models/ClassificationXGBoostModel.cs @@ -9,7 +9,7 @@ namespace SharpLearning.XGBoost.Models; /// -/// +/// /// public sealed class ClassificationXGBoostModel : IDisposable @@ -19,7 +19,7 @@ public sealed class ClassificationXGBoostModel readonly Booster m_model; /// - /// + /// /// /// public ClassificationXGBoostModel(Booster model) @@ -28,7 +28,7 @@ public ClassificationXGBoostModel(Booster model) } /// - /// + /// /// /// /// @@ -43,18 +43,11 @@ public double Predict(double[] observation) var prediction = m_model.Predict(data); var numberOfClasses = prediction.Length; - if (numberOfClasses >= 2) - { - return PredictMultiClass(prediction); - } - else - { - return PredictSingleClass(prediction); - } + return numberOfClasses >= 2 ? 
PredictMultiClass(prediction) : PredictSingleClass(prediction); } /// - /// + /// /// /// /// @@ -75,7 +68,7 @@ public double[] Predict(F64Matrix observations) } /// - /// + /// /// /// /// @@ -90,18 +83,11 @@ public ProbabilityPrediction PredictProbability(double[] observation) var prediction = m_model.Predict(data); var numberOfClasses = prediction.Length; - if (numberOfClasses >= 2) - { - return PredictMultiClassProbability(prediction); - } - else - { - return PredictSingleClassProbability(prediction); - } + return numberOfClasses >= 2 ? PredictMultiClassProbability(prediction) : PredictSingleClassProbability(prediction); } /// - /// + /// /// /// /// @@ -167,15 +153,9 @@ public static ClassificationXGBoostModel Load(string modelFilePath) /// public void Save(string modelFilePath) => m_model.Save(modelFilePath); - /// - /// - /// public void Dispose() { - if (m_model != null) - { - m_model.Dispose(); - } + m_model?.Dispose(); } ProbabilityPrediction IPredictor.Predict(double[] observation) diff --git a/src/SharpLearning.XGBoost/Models/RegressionXGBoostModel.cs b/src/SharpLearning.XGBoost/Models/RegressionXGBoostModel.cs index 64668511..775c9314 100644 --- a/src/SharpLearning.XGBoost/Models/RegressionXGBoostModel.cs +++ b/src/SharpLearning.XGBoost/Models/RegressionXGBoostModel.cs @@ -8,14 +8,14 @@ namespace SharpLearning.XGBoost.Models; /// -/// +/// /// public sealed class RegressionXGBoostModel : IDisposable, IPredictorModel { readonly Booster m_model; /// - /// + /// /// /// public RegressionXGBoostModel(Booster model) @@ -24,7 +24,7 @@ public RegressionXGBoostModel(Booster model) } /// - /// + /// /// /// /// @@ -40,7 +40,7 @@ public double Predict(double[] observation) } /// - /// + /// /// /// /// @@ -83,7 +83,6 @@ public Dictionary GetVariableImportance(Dictionary .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); } - /// /// Loads a RegressionXGBoostModel. 
/// @@ -99,14 +98,8 @@ public static RegressionXGBoostModel Load(string modelFilePath) public void Save(string modelFilePath) => m_model.Save(modelFilePath); - /// - /// - /// public void Dispose() { - if (m_model != null) - { - m_model.Dispose(); - } + m_model?.Dispose(); } } diff --git a/src/SharpLearning.XGBoost/Models/XGBoostTreeConverter.cs b/src/SharpLearning.XGBoost/Models/XGBoostTreeConverter.cs index c66eb5d5..e8d1f036 100644 --- a/src/SharpLearning.XGBoost/Models/XGBoostTreeConverter.cs +++ b/src/SharpLearning.XGBoost/Models/XGBoostTreeConverter.cs @@ -11,9 +11,9 @@ namespace SharpLearning.XGBoost.Models; /// public static class XGBoostTreeConverter { - static readonly string[] m_leafSplit = ["leaf="]; - static readonly string[] m_yesSplit = ["yes="]; - static readonly string[] m_noSplit = ["no="]; + static readonly string[] LeafSplit = ["leaf="]; + static readonly string[] YesSplit = ["yes="]; + static readonly string[] NoSplit = ["no="]; /// /// Parse array of feature importance values from text dump of XGBoost trees. @@ -69,12 +69,7 @@ static List ConvertXGBoostNodesToGBMNodes(string textTree) var nodeIndex = 1; foreach (var line in orderedLines.Values) { - if (IsLeaf(line)) - { - // Leafs are not added as nodes, leaf values are included in the split nodes. 
- continue; - } - else + if (!IsLeaf(line)) { var featureIndex = ParseFeatureIndex(line); var splitValue = ParseSplitValue(line); @@ -88,7 +83,7 @@ static List ConvertXGBoostNodesToGBMNodes(string textTree) LeftConstant = -1, LeftIndex = -1, RightConstant = -1, - RightIndex = -1 + RightIndex = -1, }; var left = orderedLines[yesIndex]; @@ -139,7 +134,7 @@ public static bool IsLeaf(string line) /// public static double ParseLeafValue(string line) { - var valueLine = line.Split(m_leafSplit, StringSplitOptions.RemoveEmptyEntries)[1]; + var valueLine = line.Split(LeafSplit, StringSplitOptions.RemoveEmptyEntries)[1]; var valueString = valueLine.Split(',')[0]; var value = FloatingPointConversion.ToF64(valueString); return value; @@ -184,7 +179,7 @@ public static double ParseSplitValue(string line) /// public static int ParseYesIndex(string line) { - return SplitYesNoIndex(line, m_yesSplit); + return SplitYesNoIndex(line, YesSplit); } /// @@ -194,7 +189,7 @@ public static int ParseYesIndex(string line) /// public static int ParseNoIndex(string line) { - return SplitYesNoIndex(line, m_noSplit); + return SplitYesNoIndex(line, NoSplit); } /// diff --git a/src/SharpLearning.XGBoost/NormalizeType.cs b/src/SharpLearning.XGBoost/NormalizeType.cs index 49e1b1ec..b16dcae0 100644 --- a/src/SharpLearning.XGBoost/NormalizeType.cs +++ b/src/SharpLearning.XGBoost/NormalizeType.cs @@ -17,5 +17,5 @@ public enum NormalizeType /// Weight of new trees are 1 / (1 + learning_rate). 
/// Dropped trees are scaled by a factor of 1 / (1 + learning_rate) /// - Forest + Forest, } diff --git a/src/SharpLearning.XGBoost/NormalizeTypeExtensions.cs b/src/SharpLearning.XGBoost/NormalizeTypeExtensions.cs index d0fe904a..1d0188d5 100644 --- a/src/SharpLearning.XGBoost/NormalizeTypeExtensions.cs +++ b/src/SharpLearning.XGBoost/NormalizeTypeExtensions.cs @@ -3,7 +3,7 @@ namespace SharpLearning.XGBoost; /// -/// +/// /// public static class NormalizeTypeExtensions { diff --git a/src/SharpLearning.XGBoost/ParameterNames.cs b/src/SharpLearning.XGBoost/ParameterNames.cs index 9d899229..3c2f6dad 100644 --- a/src/SharpLearning.XGBoost/ParameterNames.cs +++ b/src/SharpLearning.XGBoost/ParameterNames.cs @@ -26,7 +26,7 @@ static class ParameterNames /// Specify the learning task and the corresponding learning objective or /// a custom objective function to be used(see note below) /// - public const string objective = "objective"; + public const string Objective = "objective"; /// /// Number of parallel threads used to run xgboost @@ -110,9 +110,9 @@ static class ParameterNames /// 'auto': Use heuristic to choose faster one. /// - For small to medium dataset, exact greedy will be used. /// - For very large-dataset, approximate algorithm will be chosen. - /// - Because old behavior is always use exact greedy in single machine, + /// - Because old behavior is always use exact greedy in single machine, /// user will get a message when approximate algorithm is chosen to notify this choice. - /// + /// /// 'exact': Exact greedy algorithm. /// 'approx': Approximate greedy algorithm using sketching and histogram. /// 'hist': Fast histogram optimized approximate greedy algorithm. It uses some performance improvements such as bins caching. @@ -150,9 +150,9 @@ static class ParameterNames public const string RateDrop = "rate_drop"; /// - /// One drop for DART. - /// When this flag is enabled, - /// at least one tree is always dropped during the dropout + /// One drop for DART. 
+ /// When this flag is enabled, + /// at least one tree is always dropped during the dropout /// (allows Binomial-plus-one or epsilon-dropout from the original DART paper. /// public const string OneDrop = "one_drop"; diff --git a/src/SharpLearning.XGBoost/RegressionObjective.cs b/src/SharpLearning.XGBoost/RegressionObjective.cs index af035a29..1ff5ed1f 100644 --- a/src/SharpLearning.XGBoost/RegressionObjective.cs +++ b/src/SharpLearning.XGBoost/RegressionObjective.cs @@ -17,14 +17,14 @@ public enum RegressionObjective /// /// GPU version of linear regression evaluated on the GPU, - /// note that like the GPU histogram algorithm, + /// note that like the GPU histogram algorithm, /// they can only be used when the entire training session uses the same dataset. /// GPULinear, /// /// GPU version of logistic regression evaluated on the GPU, - /// note that like the GPU histogram algorithm, + /// note that like the GPU histogram algorithm, /// they can only be used when the entire training session uses the same dataset. /// GPULogistic, @@ -36,8 +36,8 @@ public enum RegressionObjective CountPoisson, /// - /// Cox regression for right censored survival time data (negative values are considered right censored). - /// Note that predictions are returned on the hazard ratio scale (i.e., as HR = exp(marginal_prediction) + /// Cox regression for right censored survival time data (negative values are considered right censored). + /// Note that predictions are returned on the hazard ratio scale (i.e., as HR = exp(marginal_prediction) /// in the proportional hazard function h(t) = h0(t) * HR). /// SurvivalCox, @@ -49,15 +49,15 @@ public enum RegressionObjective /// /// gamma regression with log-link.Output is a mean of gamma distribution. 
- /// It might be useful, e.g., for modeling insurance claims severity, + /// It might be useful, e.g., for modeling insurance claims severity, /// or for any outcome that might be gamma-distributed /// GammaRegression, /// - /// Tweedie regression with log-link.It might be useful, e.g., - /// for modeling total loss in insurance, + /// Tweedie regression with log-link.It might be useful, e.g., + /// for modeling total loss in insurance, /// or for any outcome that might be Tweedie-distributed. /// - TweedieRegression + TweedieRegression, } diff --git a/src/SharpLearning.XGBoost/SamplerType.cs b/src/SharpLearning.XGBoost/SamplerType.cs index 9e3a3b36..8aaec040 100644 --- a/src/SharpLearning.XGBoost/SamplerType.cs +++ b/src/SharpLearning.XGBoost/SamplerType.cs @@ -13,5 +13,5 @@ public enum SamplerType /// /// Dropped trees are selected in proportion to weight. /// - Weighted + Weighted, } diff --git a/src/SharpLearning.XGBoost/SamplerTypeExtensions.cs b/src/SharpLearning.XGBoost/SamplerTypeExtensions.cs index 0e7c535b..4b1cf876 100644 --- a/src/SharpLearning.XGBoost/SamplerTypeExtensions.cs +++ b/src/SharpLearning.XGBoost/SamplerTypeExtensions.cs @@ -3,7 +3,7 @@ namespace SharpLearning.XGBoost; /// -/// +/// /// public static class SamplerTypeExtensions { @@ -21,7 +21,7 @@ public static string ToXGBoostString(this SamplerType type) case SamplerType.Weighted: return "weighted"; default: - throw new ArgumentException("Unknown sampler type: " + type); ; + throw new ArgumentException("Unknown sampler type: " + type); } } } diff --git a/src/SharpLearning.XGBoost/TreeMethod.cs b/src/SharpLearning.XGBoost/TreeMethod.cs index 68b2188e..98042fb4 100644 --- a/src/SharpLearning.XGBoost/TreeMethod.cs +++ b/src/SharpLearning.XGBoost/TreeMethod.cs @@ -9,7 +9,7 @@ public enum TreeMethod /// Auto: Use heuristic to choose faster one. /// - For small to medium dataset, exact greedy will be used. /// - For very large-dataset, approximate algorithm will be chosen. 
- /// - Because old behavior is always use exact greedy in single machine, + /// - Because old behavior is always use exact greedy in single machine, /// Auto, @@ -36,5 +36,5 @@ public enum TreeMethod /// /// GPU implementation of hist algorithm. /// - GPUHist + GPUHist, } diff --git a/src/SharpLearning.XGBoost/TreeMethodExtensions.cs b/src/SharpLearning.XGBoost/TreeMethodExtensions.cs index 5da1bff0..dc1f4d26 100644 --- a/src/SharpLearning.XGBoost/TreeMethodExtensions.cs +++ b/src/SharpLearning.XGBoost/TreeMethodExtensions.cs @@ -3,7 +3,7 @@ namespace SharpLearning.XGBoost; /// -/// +/// /// public static class TreeMethodExtensions {