docs/assets/pydvl.bib: 16 additions & 1 deletion
@@ -24,7 +24,7 @@ @article{benmerzoug_re_2023
   doi = {10.5281/zenodo.8173733},
   url = {https://zenodo.org/record/8173733},
   urldate = {2023-08-27},
-  abstract = {Replication}
+  abstract = {We investigate the results of [1] in the field of data valuation. We repeat their experiments and conclude that the (Monte Carlo) Least Core is sensitive to important characteristics of the ML problem of interest, making it difficult to apply.},
+  title = {Sampling {{Permutations}} for {{Shapley Value Estimation}}},
+  author = {Mitchell, Rory and Cooper, Joshua and Frank, Eibe and Holmes, Geoffrey},
+  date = {2022},
+  journaltitle = {Journal of Machine Learning Research},
+  shortjournal = {J. Mach. Learn. Res.},
+  volume = {23},
+  number = {43},
+  pages = {1--46},
+  issn = {1533-7928},
+  url = {http://jmlr.org/papers/v23/21-0439.html},
+  urldate = {2022-10-23},
+  abstract = {Game-theoretic attribution techniques based on Shapley values are used to interpret black-box machine learning models, but their exact calculation is generally NP-hard, requiring approximation methods for non-trivial models. As the computation of Shapley values can be expressed as a summation over a set of permutations, a common approach is to sample a subset of these permutations for approximation. Unfortunately, standard Monte Carlo sampling methods can exhibit slow convergence, and more sophisticated quasi-Monte Carlo methods have not yet been applied to the space of permutations. To address this, we investigate new approaches based on two classes of approximation methods and compare them empirically. First, we demonstrate quadrature techniques in a RKHS containing functions of permutations, using the Mallows kernel in combination with kernel herding and sequential Bayesian quadrature. The RKHS perspective also leads to quasi-Monte Carlo type error bounds, with a tractable discrepancy measure defined on permutations. Second, we exploit connections between the hypersphere $S^{d-2}$ and permutations to create practical algorithms for generating permutation samples with good properties. Experiments show the above techniques provide significant improvements for Shapley value estimates over existing methods, converging to a smaller RMSE in the same number of model evaluations.}
+}
+
 @inproceedings{okhrati_multilinear_2021,
   title = {A {{Multilinear Sampling Algorithm}} to {{Estimate Shapley Values}}},
   booktitle = {2020 25th {{International Conference}} on {{Pattern Recognition}} ({{ICPR}})},