@@ -158,7 +158,7 @@ <h3 id="supported-models">Supported models</h3>
 </tr>
 <tr>
 <td style="text-align: left;">Tree GAM</td>
-<td><a href="https://csinva.io/imodels/algebraic/gam.html">🗂️</a>, <a href="https://github.com/interpretml/interpret">🔗</a>, <a href="https://dl.acm.org/doi/abs/10.1145/2339530.2339556">📄</a></td>
+<td><a href="https://csinva.io/imodels/algebraic/tree_gam.html">🗂️</a>, <a href="https://github.com/interpretml/interpret">🔗</a>, <a href="https://dl.acm.org/doi/abs/10.1145/2339530.2339556">📄</a></td>
 <td>Generalized additive model fit with short boosted trees</td>
 </tr>
 <tr>
@@ -363,6 +363,12 @@ <h2 id="support-for-different-tasks">Support for different tasks</h2>
 <td>Requires extra dependencies for speed</td>
 </tr>
 <tr>
+<td style="text-align: left;">Tree GAM</td>
+<td style="text-align: center;"><a href="https://csinva.io/imodels/algebraic/tree_gam.html">TreeGAMClassifier</a></td>
+<td style="text-align: center;"><a href="https://csinva.io/imodels/algebraic/tree_gam.html">TreeGAMRegressor</a></td>
+<td></td>
+</tr>
+<tr>
 <td style="text-align: left;">Greedy tree sums (FIGS)</td>
 <td style="text-align: center;"><a href="https://csinva.io/imodels/tree/figs.html#imodels.tree.figs.FIGSClassifier">FIGSClassifier</a></td>
 <td style="text-align: center;"><a href="https://csinva.io/imodels/tree/figs.html#imodels.tree.figs.FIGSRegressor">FIGSRegressor</a></td>
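The hunks above add Tree GAM to the model tables. For orientation, here is a minimal usage sketch: the top-level import is confirmed by the `__init__.py` hunk later in this diff, but the constructor defaults and the sklearn-style `fit`/`score` calls are assumptions based on how the other imodels estimators behave.

```python
# Hedged usage sketch for the newly exported TreeGAMClassifier.
# Import path confirmed by this diff; defaults and API are assumptions.
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split

from imodels import TreeGAMClassifier

X, y = load_breast_cancer(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

gam = TreeGAMClassifier()  # defaults assumed; see the docs link in the table above
gam.fit(X_train, y_train)
print(gam.score(X_test, y_test))  # mean accuracy, assuming sklearn's ClassifierMixin
```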
@@ -444,6 +450,9 @@ <h3 id="hierarchical-shrinkage-post-hoc-regularization-for-tree-based-methods">H
 <p align="center">
 <i><b>HS Example.</b> HS applies post-hoc regularization to any decision tree by shrinking each node towards its parent.</i>
 </p>
+<h3 id="mdi-a-flexible-random-forest-based-feature-importance-framework">MDI+: A Flexible Random Forest-Based Feature Importance Framework</h3>
+<p><a href="https://arxiv.org/pdf/2307.01932.pdf">📄 Paper</a>, <a href="https://scholar.google.com/scholar?hl=en&as_sdt=0%2C23&q=MDI%2B%3A+A+Flexible+Random+Forest-Based+Feature+Importance+Framework&btnG=#d=gs_cit&t=1690399844081&u=%2Fscholar%3Fq%3Dinfo%3Axc0LcHXE_lUJ%3Ascholar.google.com%2F%26output%3Dcite%26scirp%3D0%26hl%3Den">📌 Citation</a></p>
+<p>MDI+ is a novel feature importance framework that generalizes the popular mean decrease in impurity (MDI) importance score for random forests. At its core, MDI+ expands upon a recently discovered connection between linear regression and decision trees. In doing so, MDI+ enables practitioners to (1) tailor the feature importance computation to the data/problem structure and (2) incorporate additional features or knowledge to mitigate known biases of decision trees. In both real-data case studies and extensive real-data-inspired simulations, MDI+ outperforms commonly used feature importance measures (e.g., MDI, permutation-based scores, and TreeSHAP) by substantial margins.</p>
 <h2 id="references">References</h2>
 <details>
 <summary>Readings</summary>
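The MDI+ paragraph added above describes the recipe only in words. Below is a simplified, illustrative sketch of that recipe, not the authors' implementation (whose API may differ): encode each tree's internal-node splits as centered indicator features, fit a regularized linear model of the response on those features (ridge here stands in for the paper's choice of GLM), and score each feature by the R² of the partial prediction formed from its own split columns, averaged over trees.

```python
# Hedged, simplified MDI+-style score: ridge regression on per-tree stump
# features, with each feature scored by its partial prediction's R^2.
import numpy as np
from sklearn.datasets import make_friedman1
from sklearn.ensemble import RandomForestRegressor
from sklearn.linear_model import RidgeCV
from sklearn.metrics import r2_score

X, y = make_friedman1(n_samples=500, n_features=10, random_state=0)
rf = RandomForestRegressor(n_estimators=25, min_samples_leaf=10, random_state=0)
rf.fit(X, y)

def stump_features(est, X):
    """One centered indicator column per internal node of a fitted tree,
    tagged with the index of the feature that node splits on."""
    t = est.tree_
    cols, feats = [], []
    for node in range(t.node_count):
        if t.children_left[node] != t.children_right[node]:  # internal node
            cols.append((X[:, t.feature[node]] <= t.threshold[node]).astype(float))
            feats.append(t.feature[node])
    Psi = np.column_stack(cols)
    return Psi - Psi.mean(axis=0), np.asarray(feats)

scores = np.zeros(X.shape[1])
for est in rf.estimators_:
    Psi, feats = stump_features(est, X)
    glm = RidgeCV(alphas=np.logspace(-2, 3, 10)).fit(Psi, y)
    for k in np.unique(feats):
        # partial prediction using only feature k's stump columns
        partial = Psi[:, feats == k] @ glm.coef_[feats == k] + glm.intercept_
        scores[k] += max(r2_score(y, partial), 0.0)
scores /= rf.n_estimators
print(np.argsort(scores)[::-1])  # for make_friedman1, features 0-4 should rank highest
```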
@@ -511,7 +520,7 @@ <h2 id="references">References</h2>
 # Github repo available [here](https://github.com/csinva/imodels)
 
 from .algebraic.slim import SLIMRegressor, SLIMClassifier
-from .algebraic.gam import TreeGAMClassifier
+from .algebraic.tree_gam import TreeGAMClassifier, TreeGAMRegressor
 from .discretization.discretizer import RFDiscretizer, BasicDiscretizer
 from .discretization.mdlp import MDLPDiscretizer, BRLDiscretizer
 from .experimental.bartpy import BART
@@ -529,27 +538,64 @@ <h2 id="references">References</h2>
 from .rule_set.skope_rules import SkopeRulesClassifier
 from .rule_set.slipper import SlipperClassifier
 from .tree.c45_tree.c45_tree import C45TreeClassifier
-from .tree.cart_ccp import DecisionTreeCCPClassifier, DecisionTreeCCPRegressor, HSDecisionTreeCCPClassifierCV, \
-    HSDecisionTreeCCPRegressorCV
+from .tree.cart_ccp import (
+    DecisionTreeCCPClassifier,
+    DecisionTreeCCPRegressor,
+    HSDecisionTreeCCPClassifierCV,
+    HSDecisionTreeCCPRegressorCV,
+)
+
 # from .tree.iterative_random_forest.iterative_random_forest import IRFClassifier
 # from .tree.optimal_classification_tree import OptimalTreeModel
 from .tree.cart_wrapper import GreedyTreeClassifier, GreedyTreeRegressor
 from .tree.figs import FIGSRegressor, FIGSClassifier, FIGSRegressorCV, FIGSClassifierCV
 from .tree.gosdt.pygosdt import OptimalTreeClassifier
-from .tree.gosdt.pygosdt_shrinkage import HSOptimalTreeClassifier, HSOptimalTreeClassifierCV
-from .tree.hierarchical_shrinkage import HSTreeRegressor, HSTreeClassifier, HSTreeRegressorCV, HSTreeClassifierCV
+from .tree.gosdt.pygosdt_shrinkage import (
+    HSOptimalTreeClassifier,
+    HSOptimalTreeClassifierCV,
+)
+from .tree.hierarchical_shrinkage import (
+    HSTreeRegressor,
+    HSTreeClassifier,
+    HSTreeRegressorCV,
+    HSTreeClassifierCV,
+)
 from .tree.tao import TaoTreeClassifier, TaoTreeRegressor
 from .util.data_util import get_clean_dataset
 from .util.distillation import DistilledRegressor
 from .util.explain_errors import explain_classification_errors
 
-CLASSIFIERS = [BayesianRuleListClassifier, GreedyRuleListClassifier, SkopeRulesClassifier,
-               BoostedRulesClassifier, SLIMClassifier, SlipperClassifier, BayesianRuleSetClassifier,
-               C45TreeClassifier, OptimalTreeClassifier, OptimalRuleListClassifier, OneRClassifier,
-               SlipperClassifier, RuleFitClassifier, TaoTreeClassifier,
-               FIGSClassifier, HSTreeClassifier, HSTreeClassifierCV]  # , IRFClassifier
-REGRESSORS = [RuleFitRegressor, SLIMRegressor, GreedyTreeClassifier, FIGSRegressor,
-              TaoTreeRegressor, HSTreeRegressor, HSTreeRegressorCV, BART]
+CLASSIFIERS = [
+    BayesianRuleListClassifier,
+    GreedyRuleListClassifier,
+    SkopeRulesClassifier,
+    BoostedRulesClassifier,
+    SLIMClassifier,
+    SlipperClassifier,
+    BayesianRuleSetClassifier,
+    C45TreeClassifier,
+    OptimalTreeClassifier,
+    OptimalRuleListClassifier,
+    OneRClassifier,
+    SlipperClassifier,
+    RuleFitClassifier,
+    TaoTreeClassifier,
+    TreeGAMClassifier,
+    FIGSClassifier,
+    HSTreeClassifier,
+    HSTreeClassifierCV,
+]  # , IRFClassifier
+REGRESSORS = [
+    RuleFitRegressor,
+    SLIMRegressor,
+    GreedyTreeClassifier,
+    FIGSRegressor,
+    TaoTreeRegressor,
+    TreeGAMRegressor,
+    HSTreeRegressor,
+    HSTreeRegressorCV,
+    BART,
+]
 ESTIMATORS = CLASSIFIERS + REGRESSORS
 DISCRETIZERS = [RFDiscretizer, BasicDiscretizer, MDLPDiscretizer, BRLDiscretizer]</code></pre>
 </details>
@@ -609,7 +655,14 @@ <h1>Index 🔍</h1>
 <li><a href="#support-for-different-tasks">Support for different tasks</a><ul>
 <li><a href="#extras">Extras</a></li>
 </ul>
-</li><li><a href="#references">References</a></li>
+</li>
+<li><a href="#our-favorite-models">Our favorite models</a><ul>
+<li><a href="#figs-fast-interpretable-greedy-tree-sums">FIGS: Fast interpretable greedy-tree sums</a></li>
+<li><a href="#hierarchical-shrinkage-post-hoc-regularization-for-tree-based-methods">Hierarchical shrinkage: post-hoc regularization for tree-based methods</a></li>
+<li><a href="#mdi-a-flexible-random-forest-based-feature-importance-framework">MDI+: A Flexible Random Forest-Based Feature Importance Framework</a></li>
+</ul>
+</li>
+<li><a href="#references">References</a></li>
 </ul>
 </div>
 <ul id="index">