
Commit 47478f6

Putting 2 papers into production (El Masri et al., Lefort et al.)
1 parent 2fbd80e commit 47478f6

File tree

1 file changed: +32 -0 lines changed

_bibliography/in_production.bib

Lines changed: 32 additions & 0 deletions
@@ -13,3 +13,35 @@ @article{adrat_repulsion
 keywords = {classification, point process, repulsion},
 issn = {2824-7795}
 }
+
+@article{elmasri-optimal,
+bibtex_show = {true},
+author = {El Masri, Maxime and Morio, Jérôme and Simatos, Florian},
+title = {{Optimal projection for parametric importance sampling in high dimensions}},
+journal = {Computo},
+year = 2024,
+abstract = {In this paper we propose a dimension-reduction strategy in order to improve the performance of importance sampling in high dimension. The idea is to estimate variance terms in a small number of suitably chosen directions. We first prove that the optimal directions, i.e., the ones that minimize the Kullback--Leibler divergence with the optimal auxiliary density, are the eigenvectors associated to extreme (small or large) eigenvalues of the optimal covariance matrix. We then perform extensive numerical experiments that show that as dimension increases, these directions give estimations which are very close to optimal. Moreover, we show that the estimation remains accurate even when a simple empirical estimator of the covariance matrix is used to estimate these directions. These theoretical and numerical results open the way for different generalizations, in particular the incorporation of such ideas in adaptive importance sampling schemes},
+doi = {doi.org/10.57750/jjza-6j82},
+repository = {published-202402-elmasri-optimal},
+type = {{Research article}},
+language = {Python},
+domain = {Statistics},
+keywords = {Rare event simulation, Parameter estimation, Importance sampling, Dimension reduction, Kullback--Leibler divergence, Projection},
+issn = {2824-7795}
+}
+
+@article{lefort_peerannot,
+bibtex_show = {true},
+author = {Lefort, Tanguy and Charlier, Benjamin and Joly, Alexis and Salmon, Joseph},
+title = {{Peerannot: classification for crowdsourced image datasets with Python}},
+journal = {Computo},
+year = 2024,
+abstract = {Crowdsourcing is a quick and easy way to collect labels for large datasets, involving many workers. However, workers often disagree with each other. Sources of error can arise from the workers' skills, but also from the intrinsic difficulty of the task. We present peerannot: a Python library for managing and learning from crowdsourced labels for classification. Our library allows users to aggregate labels from common noise models or train a deep learning-based classifier directly from crowdsourced labels. In addition, we provide an identification module to easily explore the task difficulty of datasets and worker capabilities.},
+doi = {},
+repository = {},
+type = {{Research article}},
+language = {Python},
+domain = {Machine Learning},
+keywords = {},
+issn = {2824-7795}
+}
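
The elmasri-optimal abstract describes picking importance-sampling directions as the eigenvectors of the optimal covariance matrix associated with extreme (small or large) eigenvalues, estimated from a pilot run. The Python sketch below only illustrates that general idea and is not the authors' implementation: the score function phi, the problem sizes, the choice k = 1, and the eigenvalue floor are assumptions made for the example.

import numpy as np
from math import erfc, sqrt

rng = np.random.default_rng(0)

d, n = 20, 50_000        # dimension and sample size (illustrative values)
threshold = 3.0          # rare event: {phi(X) >= threshold} under X ~ N(0, I_d)

def phi(x):
    # Toy score function; the event essentially depends on one direction only.
    return x[:, 0]

# --- Pilot run: crude Monte Carlo to approximate the optimal auxiliary density ---
x = rng.standard_normal((n, d))
w = (phi(x) >= threshold).astype(float)
if w.sum() == 0:
    raise RuntimeError("no pilot hit; increase n or lower the threshold")
w /= w.sum()

mu = w @ x                                  # weighted sample mean
xc = x - mu
cov = xc.T @ (w[:, None] * xc)              # weighted covariance estimate

# --- Dimension reduction: adapt the variance only along k extreme eigen-directions ---
k = 1
eigval, eigvec = np.linalg.eigh(cov)        # eigenvalues in ascending order
v = eigvec[:, :k]                           # directions with the smallest eigenvalues
lam = np.clip(eigval[:k], 1e-2, None)       # floor to keep the auxiliary density proper
sigma = np.eye(d) + v @ np.diag(lam - 1.0) @ v.T   # unit variance elsewhere, lam along v

# --- Importance sampling with the low-rank-adapted Gaussian N(mu, sigma) ---
y = rng.multivariate_normal(mu, sigma, size=n)
diff = y - mu
_, logdet = np.linalg.slogdet(sigma)
log_nominal = -0.5 * np.sum(y**2, axis=1)   # log N(0, I_d) up to a shared constant
log_aux = -0.5 * np.einsum("ij,jk,ik->i", diff, np.linalg.inv(sigma), diff) - 0.5 * logdet
weights = np.exp(log_nominal - log_aux)     # the shared constants cancel in the ratio
p_hat = np.mean((phi(y) >= threshold) * weights)

exact = 0.5 * erfc(threshold / sqrt(2.0))   # P(N(0,1) >= threshold) for this toy event
print(f"IS estimate: {p_hat:.3e}   exact: {exact:.3e}")

In this toy problem the event depends essentially on one coordinate, so adapting the variance along a single extreme eigen-direction (and keeping the nominal unit variance elsewhere) already gives an estimate close to the exact probability; that is the kind of dimension reduction the abstract argues stays accurate as the dimension grows.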

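The lefort_peerannot abstract mentions two operations: aggregating crowdsourced labels and identifying task difficulty. The snippet below is a rough, self-contained illustration of those ideas and not the peerannot API (none of its functions appear in this commit); the votes data, majority_vote, and the entropy-based difficulty score are hypothetical stand-ins.

from collections import Counter
import numpy as np

# Toy crowdsourced answers (hypothetical data): task id -> {worker id: proposed label}
votes = {
    "img_0": {"w1": "cat", "w2": "cat", "w3": "dog"},
    "img_1": {"w1": "dog", "w2": "dog", "w3": "dog"},
    "img_2": {"w1": "cat", "w2": "dog", "w3": "dog"},
}

def majority_vote(task_votes):
    # Aggregate one task's labels by plain majority (ties broken arbitrarily).
    return Counter(task_votes.values()).most_common(1)[0][0]

def task_entropy(task_votes):
    # Entropy of the empirical label distribution: a crude task-difficulty score.
    counts = np.array(list(Counter(task_votes.values()).values()), dtype=float)
    p = counts / counts.sum()
    return float(-(p * np.log(p)).sum())

aggregated = {task: majority_vote(v) for task, v in votes.items()}
difficulty = {task: round(task_entropy(v), 3) for task, v in votes.items()}

print(aggregated)   # e.g. {'img_0': 'cat', 'img_1': 'dog', 'img_2': 'dog'}
print(difficulty)   # higher entropy = more worker disagreement on that task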