Commit 9e9b28b

improve specs
1 parent: 564c99c

13 files changed: +114 −10 lines

doc/specs/stdlib_specialfunctions_activations.md

Lines changed: 25 additions & 10 deletions
````diff
@@ -1,5 +1,5 @@
 ---
-title: specialfunctions
+title: specialfunctions_activations
 ---

 # Special functions - Neural Networks activations and their gradients
@@ -337,7 +337,7 @@ scale * (\alpha * exp(x) - \alpha ), & \text{otherwise}
 \end{cases}
 $$
 Where,
-$$scale = 1.0507009873554804934193349852946 \text{and} \alpha = 1.6732632423543772848170429916717$$
+$scale = 1.0507009873554804934193349852946$ and $\alpha = 1.6732632423543772848170429916717$

 ### Syntax

@@ -406,7 +406,7 @@ $$f(x) = \frac{1}{1+\exp(-x)} $$

 ### Syntax

-`result = ` [[stdlib_specialfunctions(module):Sigmoid(interface)]] ` (x)`
+`result = ` [[stdlib_specialfunctions(module):sigmoid(interface)]] ` (x)`

 ### Class

@@ -433,7 +433,7 @@ $$f(x) = \frac{\exp(x)}{(1+\exp(x))^2} $$

 ### Syntax

-`result = ` [[stdlib_specialfunctions(module):Sigmoid_grad(interface)]] ` (x)`
+`result = ` [[stdlib_specialfunctions(module):sigmoid_grad(interface)]] ` (x)`

 ### Class

@@ -525,7 +525,7 @@ $$

 ### Syntax

-`result = ` [[stdlib_specialfunctions(module):Step(interface)]] ` (x)`
+`result = ` [[stdlib_specialfunctions(module):step(interface)]] ` (x)`

 ### Class

@@ -539,6 +539,11 @@ Elemental function

 The function returns a value with the same type and kind as input argument.

+### Example
+```fortran
+{!example/specialfunctions_activations/example_step.f90!}
+```
+
 ## `Step_grad` - Gradient of the Step function

 ### Status
@@ -552,7 +557,7 @@ $$f(x) = 0 $$

 ### Syntax

-`result = ` [[stdlib_specialfunctions(module):Step_grad(interface)]] ` (x)`
+`result = ` [[stdlib_specialfunctions(module):step_grad(interface)]] ` (x)`

 ### Class

@@ -579,7 +584,7 @@ $$f(x) = \frac{\exp(x)-\text{max}(x_j)}{\sum_j{\exp(x)-\text{max}(x_j)}}$$

 ### Syntax

-`result = ` [[stdlib_specialfunctions(module):Softmax(interface)]] ` (x,dim)`
+`result = ` [[stdlib_specialfunctions(module):softmax(interface)]] ` (x,dim)`

 ### Class

@@ -594,6 +599,11 @@ Pure function for ranks 1 to 4.

 The function returns an array with the same rank and kind as the input argument `x`.

+### Example
+```fortran
+{!example/specialfunctions_activations/example_softmax.f90!}
+```
+
 ## `Softmax_grad` - Gradient of the Softmax function

 ### Status
@@ -607,7 +617,7 @@ $$f(x,dim) = \text{Softmax}(x,dim)*(1-\text{Softmax}(x,dim)) $$

 ### Syntax

-`result = ` [[stdlib_specialfunctions(module):Softmax_grad(interface)]] ` (x,dim)`
+`result = ` [[stdlib_specialfunctions(module):softmax_grad(interface)]] ` (x,dim)`

 ### Class

@@ -635,7 +645,7 @@ $$f(x) = \log(\exp(x)+1)$$

 ### Syntax

-`result = ` [[stdlib_specialfunctions(module):Softplus(interface)]] ` (x)`
+`result = ` [[stdlib_specialfunctions(module):softplus(interface)]] ` (x)`

 ### Class

@@ -649,6 +659,11 @@ Elemental function

 The function returns a value with the same type and kind as input argument.

+### Example
+```fortran
+{!example/specialfunctions_activations/example_softplus.f90!}
+```
+
 ## `Softplus_grad` - Gradient of the Softplus function

 ### Status
@@ -662,7 +677,7 @@ $$f(x) = \frac{\exp(x)}{\exp(x)+1} $$

 ### Syntax

-`result = ` [[stdlib_specialfunctions(module):Softplus_grad(interface)]] ` (x)`
+`result = ` [[stdlib_specialfunctions(module):softplus_grad(interface)]] ` (x)`

 ### Class

````
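All of the renamed interfaces above keep the calling pattern shown in the shipped examples. Below is a minimal sketch of the documented `sigmoid`/`sigmoid_grad` syntax, assuming (as in the other examples in this commit) that the elemental interfaces accept a `real(sp)` array; the program name is illustrative only.

```fortran
program sketch_sigmoid
    use stdlib_kinds, only: sp
    use stdlib_math, only: linspace
    use stdlib_specialfunctions, only: sigmoid, sigmoid_grad
    implicit none

    integer, parameter :: n = 10
    real(sp) :: x(n), y(n), dy(n)

    x = linspace(-2._sp, 2._sp, n)
    y  = sigmoid(x)       ! f(x)  = 1 / (1 + exp(-x)), as documented above
    dy = sigmoid_grad(x)  ! f'(x) = exp(x) / (1 + exp(x))**2
    print *, y
    print *, dy
end program sketch_sigmoid
```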

example/specialfunctions_activations/CMakeLists.txt

Lines changed: 4 additions & 0 deletions
```diff
@@ -4,3 +4,7 @@ ADD_EXAMPLE(gelu)
 ADD_EXAMPLE(relu)
 ADD_EXAMPLE(selu)
 ADD_EXAMPLE(silu)
+ADD_EXAMPLE(softmax)
+ADD_EXAMPLE(logsoftmax)
+ADD_EXAMPLE(softplus)
+ADD_EXAMPLE(step)
```

example/specialfunctions_activations/example_elu.f90

Lines changed: 1 addition & 0 deletions
```diff
@@ -9,5 +9,6 @@ program example_elu

 x = linspace(-2._sp, 2._sp, n)
 y = elu( x , 1.0 )
+print *, y
 end program example_elu

```

example/specialfunctions_activations/example_gaussian.f90

Lines changed: 1 addition & 0 deletions
```diff
@@ -9,5 +9,6 @@ program example_gaussian

 x = linspace(-2._sp, 2._sp, n)
 y = gaussian( x )
+print *, y
 end program example_gaussian

```

example/specialfunctions_activations/example_gelu.f90

Lines changed: 1 addition & 0 deletions
```diff
@@ -9,5 +9,6 @@ program example_gelu

 x = linspace(-2._sp, 2._sp, n)
 y = gelu( x )
+print *, y
 end program example_gelu

```

example/specialfunctions_activations/example_logsoftmax.f90

Lines changed: 14 additions & 0 deletions
```diff
@@ -0,0 +1,14 @@
+program example_logsoftmax
+use stdlib_kinds, only: sp
+use stdlib_math, only: linspace
+use stdlib_specialfunctions, only: logsoftmax
+implicit none
+
+integer, parameter :: n = 10
+real(sp) :: x(n), y(n)
+
+x = linspace(-2._sp, 2._sp, n)
+y = logsoftmax( x )
+print *, y
+end program example_logsoftmax
+
```
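The `logsoftmax` spec hunk is not shown in this commit, but if the new interface computes the elementwise logarithm of `softmax` (as the name suggests), the two interfaces should agree to rounding error. Below is a sketch of such a consistency check, under that assumption; the program name is illustrative only.

```fortran
program check_logsoftmax
    use stdlib_kinds, only: sp
    use stdlib_math, only: linspace
    use stdlib_specialfunctions, only: softmax, logsoftmax
    implicit none

    integer, parameter :: n = 10
    real(sp) :: x(n)

    x = linspace(-2._sp, 2._sp, n)
    ! Assumed relation: logsoftmax(x) == log(softmax(x)) up to rounding.
    print *, maxval(abs(logsoftmax(x) - log(softmax(x))))
end program check_logsoftmax
```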

example/specialfunctions_activations/example_relu.f90

Lines changed: 1 addition & 0 deletions
```diff
@@ -9,5 +9,6 @@ program example_relu

 x = linspace(-2._sp, 2._sp, n)
 y = relu( x )
+print *, y
 end program example_relu

```

example/specialfunctions_activations/example_selu.f90

Lines changed: 1 addition & 0 deletions
```diff
@@ -9,5 +9,6 @@ program example_selu

 x = linspace(-2._sp, 2._sp, n)
 y = selu( x )
+print *, y
 end program example_selu

```

example/specialfunctions_activations/example_silu.f90

Lines changed: 1 addition & 0 deletions
```diff
@@ -9,5 +9,6 @@ program example_silu

 x = linspace(-2._sp, 2._sp, n)
 y = silu( x )
+print *, y
 end program example_silu

```

example/specialfunctions_activations/example_softmax.f90

Lines changed: 14 additions & 0 deletions
```diff
@@ -0,0 +1,14 @@
+program example_softmax
+use stdlib_kinds, only: sp
+use stdlib_math, only: linspace
+use stdlib_specialfunctions, only: softmax
+implicit none
+
+integer, parameter :: n = 10
+real(sp) :: x(n), y(n)
+
+x = linspace(-2._sp, 2._sp, n)
+y = softmax( x )
+print *, y
+end program example_softmax
+
```
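The spec documents `softmax(x, dim)` as a pure function for ranks 1 to 4, while the new example only exercises a rank-1 array. Below is a sketch of the rank-2 case, assuming `dim` selects the dimension that is normalized over, as the documented syntax suggests; the program name is illustrative only.

```fortran
program sketch_softmax_dim
    use stdlib_kinds, only: sp
    use stdlib_specialfunctions, only: softmax
    implicit none

    real(sp) :: x(3, 4), y(3, 4)

    call random_number(x)
    ! Normalize along the second dimension; each row of y should then sum to 1.
    y = softmax(x, 2)
    print *, sum(y, dim=2)
end program sketch_softmax_dim
```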
