@@ -51,7 +51,7 @@ def _process_pool_init(parallelism, dinf_model):
5151 _pool = ctx .Pool (
5252 processes = parallelism ,
5353 initializer = _initializer ,
54- initargs = (dinf_model ._filename ,),
54+ initargs = (dinf_model .filename ,),
5555 )
5656
5757
@@ -408,7 +408,7 @@ def _train_discriminator(
408408 )
409409 train_x , train_y , train_x_generator = _generate_training_data (
410410 target = dinf_model .target_func ,
411- generator = dinf_model .generator_func ,
411+ generator = dinf_model .generator_func_v ,
412412 thetas = training_thetas ,
413413 parallelism = parallelism ,
414414 ss = ss_train ,
@@ -418,7 +418,7 @@ def _train_discriminator(
418418 if test_thetas is not None and len (test_thetas ) > 0 :
419419 val_x , val_y , val_x_generator = _generate_training_data (
420420 target = dinf_model .target_func ,
421- generator = dinf_model .generator_func ,
421+ generator = dinf_model .generator_func_v ,
422422 thetas = test_thetas ,
423423 parallelism = parallelism ,
424424 ss = ss_val ,
@@ -593,7 +593,7 @@ def predict(
593593 replicates , rng = np .random .default_rng (ss_thetas )
594594 )
595595 x = _generate_data (
596- generator = dinf_model .generator_func ,
596+ generator = dinf_model .generator_func_v ,
597597 thetas = thetas ,
598598 parallelism = parallelism ,
599599 rng = np .random .default_rng (ss_generator ),
@@ -734,7 +734,7 @@ def mcmc_gan(
734734 log_prob_func = functools .partial (
735735 _log_prob ,
736736 discriminator = discriminator ,
737- generator = dinf_model .generator_func ,
737+ generator = dinf_model .generator_func_v ,
738738 parameters = parameters ,
739739 parallelism = parallelism ,
740740 num_replicates = Dx_replicates ,
@@ -815,10 +815,10 @@ def sample_smooth(
815815 """
816816 Sample from a smoothed set of weighted observations.
817817
818- Samples are drawn from the thetas, weighted by their probability.
818+ Samples are drawn from ``thetas``, weighted by their probability.
819819 New points are drawn within a neighbourhood of the sampled thetas
820820 using a multivariate normal whose covariance is calculated from the
821- thetas. This is effectively sampling from a Gaussian KDE, but
821+ thetas. This is equivalent to sampling from a Gaussian KDE, but
822822 avoids doing an explicit density estimation.
823823 Scott's rule of thumb is used for bandwidth selection.
824824
@@ -843,11 +843,14 @@ def sample_smooth(
843843 * "transform": thetas are transformed before sampling, and
844844 the sampled values are inverse-transformed before being
845845 returned.
846+ See :meth:`Parameters.transform` and :meth:`Parameters.itransform`.
846847 * "truncate": sampled values are truncated at the parameter limits.
848+ See :meth:`Parameters.truncate`.
847849 * "reflect": sample values that are out of bounds are reflected
848850 inside the parameter limits by the same magnitude that they were
849851 out of bounds. Values that are too far out of bounds to be
850852 reflected are truncated at the parameter limits.
853+ See :meth:`Parameters.reflect`.
851854
852855 :return:
853856 The sampled values.
@@ -1172,7 +1175,7 @@ def pretraining_dinf(
11721175 lp = _log_prob (
11731176 thetas ,
11741177 discriminator = discriminator ,
1175- generator = dinf_model .generator_func ,
1178+ generator = dinf_model .generator_func_v ,
11761179 parameters = parameters ,
11771180 num_replicates = 1 ,
11781181 parallelism = parallelism ,
@@ -1476,7 +1479,7 @@ def pg_gan(
14761479 lp = _log_prob (
14771480 proposal_thetas ,
14781481 discriminator = discriminator ,
1479- generator = dinf_model .generator_func ,
1482+ generator = dinf_model .generator_func_v ,
14801483 parameters = parameters ,
14811484 num_replicates = Dx_replicates ,
14821485 parallelism = parallelism ,
0 commit comments