Commit a2e82ab

edits/doc update
1 parent a5b777c commit a2e82ab

File tree

1 file changed: +16 -10 lines changed


ngclearn/museum/harmonium.py

Lines changed: 16 additions & 10 deletions
@@ -17,7 +17,7 @@

 class Harmonium:
     """
-    Structure for constructing the sparse coding model proposed in:
+    Structure for constructing the Harmonium model proposed in:

     Hinton, Geoffrey E. "Training products of experts by maximizing contrastive
     likelihood." Technical Report, Gatsby computational neuroscience unit (1999).
@@ -27,26 +27,29 @@ class Harmonium:

     | z0 -(z0-z1)-> z1
     | Note: z1-z0 = (z0-z1)^T (transpose-tied synapses)

-    Another important reference for designing stable harmoniums is here:
+    Another important reference for designing stable Harmoniums is here:

     Hinton, Geoffrey E. "A practical guide to training restricted Boltzmann
     machines." Neural networks: Tricks of the trade. Springer, Berlin,
     Heidelberg, 2012. 599-619.

+    Note: if you set *samp_fx* to "identity", you force the Harmonium to
+    work as a mean-field Harmonium/Boltzmann machine
+
     Args:
-        args: a Config dictionary containing necessary meta-parameters for the harmonium
+        args: a Config dictionary containing necessary meta-parameters for the Harmonium

         | DEFINITION NOTE:
         | args should contain values for the following:
         | * batch_size - the fixed batch-size to be fed into this model
-        | * z_dim - # of latent variables in layers z1
+        | * z_dim - # of latent variables in layer z1
         | * x_dim - # of latent variables in layer z0 (or sensory x)
         | * seed - number to control determinism of weight initialization
         | * wght_sd - standard deviation of Gaussian initialization of weights
         | * K - # of steps to take when conducting Contrastive Divergence
-        | * act_fx - activation function for layers z1 (Default: sigmoid)
+        | * act_fx - activation function for layer z1 (Default: sigmoid)
         | * out_fx - activation function for layer z0 (prediction of z0) (Default: sigmoid)
-
+        | * samp_fx - sampling function for layer z1 (Default = bernoulli)
     """
     def __init__(self, args):
         self.args = args
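To make the mean-field remark added above concrete, here is a minimal NumPy sketch (not the ngc-learn API; sample_latent and its arguments are illustrative assumptions) of what the two samp_fx settings do to the latent state S(z): "bernoulli" draws binary samples from the post-activation phi(z), while "identity" passes the mean activities through.

import numpy as np

def sample_latent(phi_z, samp_fx="bernoulli", rng=None):
    # "bernoulli": draw binary states from the post-activation probabilities phi(z);
    # "identity": keep the probabilities themselves, i.e. mean-field behavior.
    rng = rng if rng is not None else np.random.default_rng()
    if samp_fx == "bernoulli":
        return (rng.random(phi_z.shape) < phi_z).astype(phi_z.dtype)
    return phi_z

phi_z1 = 1.0 / (1.0 + np.exp(-np.array([[0.2, -1.5, 3.0]])))  # sigmoid post-activation
print(sample_latent(phi_z1, "bernoulli"))  # stochastic binary sample, e.g. [[0. 0. 1.]]
print(sample_latent(phi_z1, "identity"))   # mean activities, approx [[0.55 0.18 0.95]]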
@@ -57,6 +60,9 @@ def __init__(self, args):

         seed = int(self.args.getArg("seed"))
         self.seed = seed
+        samp_fx = "bernoulli"
+        if self.args.hasArg("samp_fx") == True:
+            samp_fx = self.args.getArg("samp_fx")
         #K = int(self.args.getArg("K"))
         act_fx = self.args.getArg("act_fx")
         out_fx = "sigmoid"
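The three added lines above follow a default-with-override pattern: samp_fx stays "bernoulli" unless the experiment configuration supplies a value. A small usage sketch, assuming only that the args object exposes the hasArg()/getArg() calls used in this file (FakeConfig below is a hypothetical stand-in, not the real ngc-learn Config):

class FakeConfig:
    # Hypothetical stand-in mimicking the two Config calls used in __init__ above.
    def __init__(self, table):
        self.table = table
    def hasArg(self, key):
        return key in self.table
    def getArg(self, key):
        return self.table[key]

args = FakeConfig({"seed": "69", "act_fx": "sigmoid"})  # no "samp_fx" entry supplied
samp_fx = "bernoulli"                # default, matching the diff above
if args.hasArg("samp_fx") == True:   # override only if the config provides a value
    samp_fx = args.getArg("samp_fx")
print(samp_fx)  # -> "bernoulli"; add "samp_fx": "identity" to the table for a mean-field model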
@@ -73,7 +79,7 @@ def __init__(self, args):

         ## set up positive phase nodes
         z1 = SNode(name="z1", dim=z_dim, beta=1, act_fx=act_fx, zeta=0.0,
-                   integrate_kernel=integrate_cfg, samp_fx="bernoulli")
+                   integrate_kernel=integrate_cfg, samp_fx=samp_fx)
         z0 = SNode(name="z0", dim=x_dim, beta=1, act_fx="identity", zeta=0.0,
                    integrate_kernel=integrate_cfg)
         z0_z1 = z0.wire_to(z1, src_comp="phi(z)", dest_comp="dz_bu", cable_kernel=dcable_cfg)
@@ -98,11 +104,11 @@ def __init__(self, args):

         # set up negative phase nodes
         z1n_i = SNode(name="z1n_i", dim=z_dim, beta=1, act_fx=act_fx, zeta=0.0,
-                      integrate_kernel=integrate_cfg, samp_fx="bernoulli")
+                      integrate_kernel=integrate_cfg, samp_fx=samp_fx)
         z0n = SNode(name="z0n", dim=x_dim, beta=1, act_fx=out_fx, zeta=0.0,
-                    integrate_kernel=integrate_cfg, samp_fx="bernoulli")
+                    integrate_kernel=integrate_cfg, samp_fx=samp_fx)
         z1n = SNode(name="z1n", dim=z_dim, beta=1, act_fx=act_fx, zeta=0.0,
-                    integrate_kernel=integrate_cfg, samp_fx="bernoulli")
+                    integrate_kernel=integrate_cfg, samp_fx=samp_fx)
         n1_n0 = z1n_i.wire_to(z0n, src_comp="S(z)", dest_comp="dz_td", mirror_path_kernel=(z0_z1,"A^T"),
                               cable_kernel=dcable_cfg) # reuse A but create new b
         n0_n1 = z0n.wire_to(z1n, src_comp="phi(z)", dest_comp="dz_bu", mirror_path_kernel=(z0_z1,"A+b")) # reuse A & b
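For orientation, the negative-phase wiring above feeds z1n_i into z0n through the transpose-tied "A^T" path and then z0n back into z1n through the reused "A+b" path, which is the usual reconstruction chain of Contrastive Divergence. A rough NumPy sketch of that chain under standard Bernoulli-RBM assumptions (A, b, c and the shapes are illustrative, not taken from this file):

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

rng = np.random.default_rng(0)
x_dim, z_dim = 4, 3
A = rng.normal(0.0, 0.1, size=(x_dim, z_dim))  # visible-to-hidden synapses (z0 -> z1)
b = np.zeros((1, z_dim))                       # hidden bias, reused on the way back up
c = np.zeros((1, x_dim))                       # visible bias, created fresh for the new path

z1_sample = rng.integers(0, 2, size=(1, z_dim)).astype(float)  # S(z) from the positive phase
z0n_recon = sigmoid(z1_sample @ A.T + c)   # top-down reconstruction via the tied A^T
z1n_recon = sigmoid(z0n_recon @ A + b)     # bottom-up pass reusing A and b ("A+b")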
