
Commit 7141a21

new entries in published papers and in production manuscripts

1 parent 9287fc5 commit 7141a21

File tree

_bibliography/in_production.bib
_bibliography/published.bib

2 files changed: 22 additions, 7 deletions

_bibliography/in_production.bib

Lines changed: 6 additions & 7 deletions
@@ -1,15 +1,14 @@
-@article{delattre_fim,
+@article{cleynen_local,
   bibtex_show = {true},
-  author = {Delattre, Maud and Kuhn, Estelle},
-  title = {{Computing an empirical Fisher information matrix estimate in latent variable models through stochastic approximation}},
+  author = {Cleynen, Alice and Raynal, Louis and Marin, Jean-Michel},
+  title = {{Local tree methods for classification: a review and some dead ends}},
   journal = {Computo},
   year = 2023,
-  abstract = {The Fisher information matrix (FIM) is a key quantity in statistics. However its exact computation is often not trivial. In particular in many latent variable models, it is intricated due to the presence of unobserved variables. Several methods have been proposed to approximate the FIM when it can not be evaluated analytically. Different estimates have been considered, in particular moment estimates. However some of them require to compute second derivatives of the complete data log-likelihood which leads to some disadvantages. In this paper, we focus on the empirical Fisher information matrix defined as an empirical estimate of the covariance matrix of the score, which only requires to compute the first derivatives of the log-likelihood. Our contribution consists in presenting a new numerical method to evaluate this empirical Fisher information matrix in latent variable model when the proposed estimate can not be directly analytically evaluated. We propose a stochastic approximation estimation algorithm to compute this estimate as a by-product of the parameter estimate. We evaluate the finite sample size properties of the proposed estimate and the convergence properties of the estimation algorithm through simulation studies.},
-  doi = {10.57750/r5gx-jk62},
-  repository = {published-202311-delattre-fim},
+  abstract = {Random Forests (RF) [@breiman:2001] are very popular machine learning methods. They perform well even with little or no tuning, and have some theoretical guarantees, especially for sparse problems [@biau:2012;@scornet:etal:2015]. These learning strategies have been used in several contexts, also outside the field of classification and regression. To perform Bayesian model selection in the case of intractable likelihoods, the ABC Random Forests (ABC-RF) strategy of @pudlo:etal:2016 consists in applying Random Forests on training sets composed of simulations coming from the Bayesian generative models. The ABC-RF technique is based on an underlying RF for which the training and prediction phases are separated. The training phase does not take into account the data to be predicted. This seems to be suboptimal as in the ABC framework only one observation is of interest for the prediction. In this paper, we study tree-based methods that are built to predict a specific instance in a classification setting. This type of methods falls within the scope of local (lazy/instance-based/case specific) classification learning. We review some existing strategies and propose two new ones. The first consists in modifying the tree splitting rule by using kernels, the second in using a first RF to compute some local variable importance that is used to train a second, more local, RF. Unfortunately, these approaches, although interesting, do not provide conclusive results.},
+  repository = {published-202312-cleynen-local},
   type = {{Research article}},
   language = {R},
   domain = {Statistics},
-  keywords = {Model-based standard error, moment estimate, Fisher identity, stochastic approximation algorithm},
+  keywords = {},
   issn = {2824-7795},
 }
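
For readers skimming the abstract above, here is a minimal sketch of the two-stage "local" forest idea it describes: a first RF provides variable importances that guide a second, more local RF trained around the single instance of interest. This is not the authors' algorithm; the use of scikit-learn's global impurity importances as a stand-in for local ones, the number of retained features, and the neighbourhood size are all illustrative assumptions.

```python
# Illustrative sketch only (not the paper's method): two-stage "local" random forest.
import numpy as np
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier

X, y = make_classification(n_samples=500, n_features=20, n_informative=5, random_state=0)
x_star = X[0]  # the single instance we actually care about (as in the ABC setting)

# Stage 1: a standard RF gives (global) variable importances -- a stand-in
# for the paper's local variable importance.
rf_global = RandomForestClassifier(n_estimators=200, random_state=0).fit(X, y)
top = np.argsort(rf_global.feature_importances_)[-5:]  # keep 5 "important" variables (assumption)

# Stage 2: refit a forest on the training points closest to x_star in the
# reduced space, so the second forest is "more local" to the query instance.
dist = np.linalg.norm(X[:, top] - x_star[top], axis=1)
neighbours = np.argsort(dist)[:200]  # neighbourhood size is an assumption
rf_local = RandomForestClassifier(n_estimators=200, random_state=0)
rf_local.fit(X[np.ix_(neighbours, top)], y[neighbours])

print(rf_local.predict_proba(x_star[top].reshape(1, -1)))
```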

_bibliography/published.bib

Lines changed: 16 additions & 0 deletions
@@ -1,3 +1,19 @@
+@article{delattre_fim,
+  bibtex_show = {true},
+  author = {Delattre, Maud and Kuhn, Estelle},
+  title = {{Computing an empirical Fisher information matrix estimate in latent variable models through stochastic approximation}},
+  journal = {Computo},
+  year = 2023,
+  abstract = {The Fisher information matrix (FIM) is a key quantity in statistics. However its exact computation is often not trivial. In particular in many latent variable models, it is intricated due to the presence of unobserved variables. Several methods have been proposed to approximate the FIM when it can not be evaluated analytically. Different estimates have been considered, in particular moment estimates. However some of them require to compute second derivatives of the complete data log-likelihood which leads to some disadvantages. In this paper, we focus on the empirical Fisher information matrix defined as an empirical estimate of the covariance matrix of the score, which only requires to compute the first derivatives of the log-likelihood. Our contribution consists in presenting a new numerical method to evaluate this empirical Fisher information matrix in latent variable model when the proposed estimate can not be directly analytically evaluated. We propose a stochastic approximation estimation algorithm to compute this estimate as a by-product of the parameter estimate. We evaluate the finite sample size properties of the proposed estimate and the convergence properties of the estimation algorithm through simulation studies.},
+  doi = {10.57750/r5gx-jk62},
+  repository = {published-202311-delattre-fim},
+  type = {{Research article}},
+  language = {R},
+  domain = {Statistics},
+  keywords = {Model-based standard error, moment estimate, Fisher identity, stochastic approximation algorithm},
+  issn = {2824-7795},
+}
+
 @article{sanou_multiscale,
   bibtex_show = {true},
   author = {Sanou, Edmond and Ambroise, Christophe and Robin, Geneviève},
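
As context for the abstract of the delattre_fim entry above, the "empirical Fisher information matrix" it mentions is the usual moment estimate built from first derivatives only, i.e. the empirical covariance of the individual scores. The sketch below gives the standard definition under assumed notation; it does not reproduce the paper's stochastic-approximation algorithm.

```latex
% s_i(\theta) = \nabla_\theta \log f(y_i;\theta): observed-data score of individual i.
% Empirical FIM = empirical covariance matrix of the scores (first derivatives only).
\hat{I}_n(\theta)
  = \frac{1}{n}\sum_{i=1}^{n} s_i(\theta)\, s_i(\theta)^{\top}
    - \bar{s}_n(\theta)\,\bar{s}_n(\theta)^{\top},
  \qquad
  \bar{s}_n(\theta) = \frac{1}{n}\sum_{i=1}^{n} s_i(\theta).
```

In a latent variable model the score $s_i(\theta)$ is itself intractable; by the Fisher identity it equals $\mathbb{E}\left[\nabla_\theta \log f(y_i, z_i;\theta)\mid y_i\right]$, which is what the stochastic approximation mentioned in the abstract (and in the entry's keywords) approximates alongside the parameter estimate.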
