@@ -34,7 +34,28 @@ def __init__(
 
         Parameters
         ----------
-        #TODO
+        num_inducing_points : int, optional
+            The number of inducing points for set-based dimensionality reduction.
+        embed_dim : int, optional
+            Dimensionality of the embedding space, by default 64.
+        num_heads : int, optional
+            Number of attention heads, by default 4.
+        dropout : float, optional
+            Dropout rate applied to attention and MLP layers, by default 0.05.
+        mlp_depth : int, optional
+            Number of layers in the feedforward MLP block, by default 2.
+        mlp_width : int, optional
+            Width of each hidden layer in the MLP block, by default 128.
+        mlp_activation : str, optional
+            Activation function used in the MLP block, by default "gelu".
+        kernel_initializer : str, optional
+            Initializer for kernel weights, by default "he_normal".
+        use_bias : bool, optional
+            Whether to include bias terms in dense layers, by default True.
+        layer_norm : bool, optional
+            Whether to apply layer normalization before and after attention, by default True.
+        **kwargs : dict
+            Additional keyword arguments passed to the Keras Layer base class.
         """
 
         super().__init__(**kwargs)
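
For context, a minimal usage sketch of the documented constructor arguments might look like the following. The layer's class name and import path are not shown in the diff, so `SetAttentionLayer` and `my_module` are placeholders, and the input shapes are purely illustrative.

```python
import numpy as np

# Hypothetical import -- substitute the actual class defined in this module.
from my_module import SetAttentionLayer

# Instantiate the layer with the constructor arguments documented above.
layer = SetAttentionLayer(
    num_inducing_points=16,
    embed_dim=64,
    num_heads=4,
    dropout=0.05,
    mlp_depth=2,
    mlp_width=128,
    mlp_activation="gelu",
    kernel_initializer="he_normal",
    use_bias=True,
    layer_norm=True,
)

# A batch of 8 sets, each containing 100 elements of dimension 5.
x = np.random.normal(size=(8, 100, 5)).astype("float32")
out = layer(x)
print(out.shape)
```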