@@ -298,3 +298,106 @@ def update(self, pre_spike, post_v=None):
       return self.out(post_vs, post_v)
     else:
       return post_vs
+
+
+class Alpha(DualExponential):
+  r"""Alpha synapse model.
+
+  **Model Descriptions**
+
+  The analytical expression of the alpha synapse is given by:
+
+  .. math::
+
+      g_{\mathrm{syn}}(t) = g_{\mathrm{max}} \frac{t-t_{s}}{\tau} \exp \left(-\frac{t-t_{s}}{\tau}\right).
+
+  However, this expression is inconvenient to implement directly, so we rewrite it
+  in the equivalent differential form:
+
+  .. math::
+
+      \begin{aligned}
+      &g_{\mathrm{syn}}(t) = g_{\mathrm{max}} g \\
+      &\frac{d g}{d t} = -\frac{g}{\tau} + h \\
+      &\frac{d h}{d t} = -\frac{h}{\tau} + \delta\left(t_{s}-t\right)
+      \end{aligned}
+
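+  For a single presynaptic spike arriving at :math:`t_{s}`, the equation for
+  :math:`h` gives :math:`h(t) \propto e^{-(t-t_{s})/\tau}`; substituting this into
+  the equation for :math:`g` yields
+
+  .. math::
+
+      g(t) \propto \left(t-t_{s}\right) \exp \left(-\frac{t-t_{s}}{\tau}\right),
+
+  which recovers the alpha shape above, with the :math:`1/\tau` normalization
+  absorbed into :math:`g_{\mathrm{max}}`.
+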
+  **Model Examples**
+
+  .. plot::
+    :include-source: True
+
+    >>> import brainpy as bp
+    >>> from brainpy import neurons, synapses, synouts
+    >>> import matplotlib.pyplot as plt
+    >>>
+    >>> neu1 = neurons.LIF(1)
+    >>> neu2 = neurons.LIF(1)
+    >>> syn1 = synapses.Alpha(neu1, neu2, bp.connect.All2All(), output=synouts.CUBA())
+    >>> net = bp.Network(pre=neu1, syn=syn1, post=neu2)
+    >>>
+    >>> runner = bp.DSRunner(net, inputs=[('pre.input', 25.)], monitors=['pre.V', 'post.V', 'syn.g', 'syn.h'])
+    >>> runner.run(150.)
+    >>>
+    >>> fig, gs = bp.visualize.get_figure(2, 1, 3, 8)
+    >>> fig.add_subplot(gs[0, 0])
+    >>> plt.plot(runner.mon.ts, runner.mon['pre.V'], label='pre-V')
+    >>> plt.plot(runner.mon.ts, runner.mon['post.V'], label='post-V')
+    >>> plt.legend()
+    >>> fig.add_subplot(gs[1, 0])
+    >>> plt.plot(runner.mon.ts, runner.mon['syn.g'], label='g')
+    >>> plt.plot(runner.mon.ts, runner.mon['syn.h'], label='h')
+    >>> plt.legend()
+    >>> plt.show()
+
+  Parameters
+  ----------
+  conn: optional, ArrayType, dict of (str, ndarray), TwoEndConnector
+    The synaptic connections.
+  comp_method: str
+    The computation method used for model speed optimization. It can be
+    `sparse` or `dense`. The default is `dense`.
+  delay_step: int, ArrayType, Initializer, Callable
+    The delay length. It should be the value of :math:`\mathrm{delay\_time / dt}`.
+  tau_decay: float, ArrayType
+    The time constant of the synaptic decay phase. [ms]
+  g_max: float, ArrayType, Initializer, Callable
+    The synaptic strength (the maximum conductance). Default is 1.
+  name: str
+    The name of this synaptic projection.
+  method: str
+    The numerical integration method.
+
+  References
+  ----------
+
+  .. [1] Sterratt, David, Bruce Graham, Andrew Gillies, and David Willshaw.
+         "The Synapse." Principles of Computational Modelling in Neuroscience.
+         Cambridge: Cambridge UP, 2011. 172-95. Print.
+  """
+
+  def __init__(
+      self,
+      conn: Union[TwoEndConnector, ArrayType, Dict[str, ArrayType]],
+      out: Optional[SynOutNS] = None,
+      stp: Optional[SynSTPNS] = None,
+      comp_method: str = 'dense',
+      g_max: Union[float, ArrayType, Initializer, Callable] = 1.,
+      tau_decay: Union[float, ArrayType] = 10.0,
+      method: str = 'exp_auto',
+
+      # other parameters
+      name: str = None,
+      mode: bm.Mode = None,
+  ):
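+    # Setting tau_rise equal to tau_decay reduces the dual-exponential
+    # kernel of the parent class to the alpha kernel described above.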
+    super().__init__(conn=conn,
+                     comp_method=comp_method,
+                     g_max=g_max,
+                     tau_decay=tau_decay,
+                     tau_rise=tau_decay,
+                     method=method,
+                     out=out,
+                     stp=stp,
+                     name=name,
+                     mode=mode)
+