@serializable(package="bayesflow.adapters")
class Standardize(ElementwiseTransform):
    """
-    Transform that when applied standardizes data using typical z-score standardization i.e. for some unstandardized
-    data x the standardized version z would be
+    Transform that, when applied, standardizes data using typical z-score standardization,
+    i.e. for some unstandardized data x, the standardized version z would be

    >>> z = (x - mean(x)) / std(x)

@@ -27,6 +27,38 @@ class Standardize(ElementwiseTransform):
        standardization happens individually for each dimension
    momentum : float in (0,1)
        The momentum during training
+
+    Examples
+    --------
+    1) Standardize all variables using their individually estimated means and stds.
+
+    >>> adapter = (
+        bf.adapters.Adapter()
+        .standardize()
+    )
+
+
+    2) Standardize all variables with the same known mean and std.
+
+    >>> adapter = (
+        bf.adapters.Adapter()
+        .standardize(mean=5, sd=10)
+    )
+
+
+    3) Mix of fixed and estimated means/stds. Suppose we have priors for "beta" and "sigma" whose
+    means and stds are known, while for all other variables the means and stds are unknown.
+    Then standardize should be applied in several stages, specifying which variables to include or exclude.
+
+    >>> adapter = (
+        bf.adapters.Adapter()
+        # mean fixed, std estimated
+        .standardize(include="beta", mean=1)
+        # both mean and std fixed
+        .standardize(include="sigma", mean=0.6, sd=3)
+        # both means and stds estimated for all other variables
+        .standardize(exclude=["beta", "sigma"])
+    )
    """

    def __init__(
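The docstring hunk above describes z-score standardization computed per dimension, with running statistics updated by a momentum term during training. Below is a minimal NumPy sketch of that idea, included for illustration only: it is not the BayesFlow implementation, and the class name RunningStandardize and its arguments are hypothetical.

import numpy as np


class RunningStandardize:
    """Illustrative z-score standardizer with momentum-updated running statistics."""

    def __init__(self, momentum=0.99, axis=0):
        self.momentum = momentum  # weight kept by the previous running estimate, in (0, 1)
        self.axis = axis          # statistics are computed per dimension along this axis
        self.mean = None
        self.std = None

    def __call__(self, x, training=True):
        x = np.asarray(x, dtype=float)
        if training:
            batch_mean = x.mean(axis=self.axis)
            batch_std = x.std(axis=self.axis)
            if self.mean is None:
                # the first batch initializes the running statistics
                self.mean, self.std = batch_mean, batch_std
            else:
                # exponential moving average controlled by the momentum
                m = self.momentum
                self.mean = m * self.mean + (1 - m) * batch_mean
                self.std = m * self.std + (1 - m) * batch_std
        # z = (x - mean(x)) / std(x), as in the docstring above
        return (x - self.mean) / self.std


# usage: 128 draws of a 3-dimensional variable, one mean/std per dimension
rng = np.random.default_rng(0)
standardize = RunningStandardize(momentum=0.99)
z = standardize(rng.normal(loc=5.0, scale=10.0, size=(128, 3)))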