Skip to content

Commit 219aeec

Browse files
committed
Improve SetTransformer documentation
1 parent f1f19fd commit 219aeec

File tree

1 file changed

+3
-2
lines changed

1 file changed

+3
-2
lines changed

bayesflow/summary_networks.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -140,7 +140,7 @@ def __init__(
140140
# Construct final attention layer, which will perform cross-attention
141141
# between the outputs of the self-attention layers and the dynamic template
142142
if bidirectional:
143-
final_input_dim = template_dim*2
143+
final_input_dim = template_dim * 2
144144
else:
145145
final_input_dim = template_dim
146146
self.output_attention = MultiHeadAttentionBlock(
@@ -184,7 +184,8 @@ def call(self, x, **kwargs):
184184

185185
class SetTransformer(tf.keras.Model):
186186
"""Implements the set transformer architecture from [1] which ultimately represents
187-
a learnable permutation-invariant function.
187+
a learnable permutation-invariant function. Designed to naturally model interactions in
188+
the input set, which may be hard to capture with the simpler ``DeepSet`` architecture.
188189
189190
[1] Lee, J., Lee, Y., Kim, J., Kosiorek, A., Choi, S., & Teh, Y. W. (2019).
190191
Set transformer: A framework for attention-based permutation-invariant neural networks.

0 commit comments

Comments
 (0)