Commit c89c36f

Resolve conflicts with main

Merge: 2 parents f25f2c1 + bed0f51

9 files changed: +135, -125 lines

src/nf/nf_conv1d_layer_submodule.f90

Lines changed: 1 addition & 8 deletions
@@ -22,7 +22,6 @@ module function conv1d_layer_cons(filters, kernel_size, activation, stride) result(res)
   end function conv1d_layer_cons

   module subroutine init(self, input_shape)
-    implicit none
     class(conv1d_layer), intent(in out) :: self
     integer, intent(in) :: input_shape(:)

@@ -58,14 +57,12 @@ module subroutine init(self, input_shape)
   end subroutine init

   pure module subroutine forward(self, input)
-    implicit none
     class(conv1d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:)
-    integer :: input_channels, input_width
+    integer :: input_width
     integer :: j, n
     integer :: iws, iwe

-    input_channels = size(input, dim=1)
     input_width = size(input, dim=2)

     ! Loop over output positions.
@@ -89,7 +86,6 @@ pure module subroutine forward(self, input)
   end subroutine forward

   pure module subroutine backward(self, input, gradient)
-    implicit none
     class(conv1d_layer), intent(in out) :: self
     ! 'input' has shape: (channels, input_width)
     ! 'gradient' (dL/dy) has shape: (filters, output_width)
@@ -105,8 +101,6 @@ pure module subroutine backward(self, input, gradient)
     real :: db_local(self % filters)
     real :: dw_local(self % filters, self % channels, self % kernel_size)

-    ! Determine dimensions.
-    input_channels = size(input, dim=1)
     input_width = size(input, dim=2)

     !--- Compute the local gradient gdz = (dL/dy) * sigma'(z) for each output.
@@ -126,7 +120,6 @@ pure module subroutine backward(self, input, gradient)
       do j = 1, self % width
         iws = self % stride * (j-1) + 1
         iwe = min(iws + self % kernel_size - 1, input_width)
-
         do k = 1, self % channels
           ! Weight gradient: accumulate contribution from the input window.
           dw_local(n,k,1:iwe-iws+1) = dw_local(n,k,1:iwe-iws+1) + input(k,iws:iwe) * gdz(n,j)

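Most of the deletions across these submodule files follow one pattern: per-procedure `implicit none` statements are dropped, along with a few unused locals such as `input_channels`. The statements are redundant because a module subprogram inherits the implicit-typing mapping of its host scoping unit, so a single `implicit none` at the top of the enclosing submodule already covers every contained procedure. A minimal sketch of the rule, with illustrative names that are not part of this commit:

! demo_m / demo_s are illustrative names, not from neural-fortran.
module demo_m
  implicit none
  interface
    module subroutine report(x)
      real, intent(in) :: x
    end subroutine report
  end interface
end module demo_m

submodule (demo_m) demo_s
  implicit none  ! one statement at submodule scope...
contains
  module subroutine report(x)
    ! ...covers this procedure; a per-procedure implicit none adds nothing
    real, intent(in) :: x
    print '("x = ", f0.3)', x
  end subroutine report
end submodule demo_s
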
src/nf/nf_conv2d_layer_submodule.f90

Lines changed: 0 additions & 2 deletions
@@ -68,7 +68,6 @@ end subroutine init


   pure module subroutine forward(self, input)
-    implicit none
     class(conv2d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:,:)
     integer :: input_width, input_height, input_channels
@@ -121,7 +120,6 @@ end subroutine forward


   pure module subroutine backward(self, input, gradient)
-    implicit none
     class(conv2d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:,:)
     real, intent(in) :: gradient(:,:,:)

src/nf/nf_locally_connected2d_layer_submodule.f90

Lines changed: 3 additions & 16 deletions
@@ -8,7 +8,6 @@
 contains

   module function locally_connected2d_layer_cons(filters, kernel_size, activation) result(res)
-    implicit none
     integer, intent(in) :: filters
     integer, intent(in) :: kernel_size
     class(activation_function), intent(in) :: activation
@@ -21,7 +20,6 @@ module function locally_connected2d_layer_cons(filters, kernel_size, activation) result(res)
   end function locally_connected2d_layer_cons

   module subroutine init(self, input_shape)
-    implicit none
     class(locally_connected2d_layer), intent(in out) :: self
     integer, intent(in) :: input_shape(:)

@@ -52,16 +50,11 @@ module subroutine init(self, input_shape)
   end subroutine init

   pure module subroutine forward(self, input)
-    implicit none
     class(locally_connected2d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:)
-    integer :: input_channels, input_width
     integer :: j, n
     integer :: iws, iwe

-    input_channels = size(input, dim=1)
-    input_width = size(input, dim=2)
-
     do j = 1, self % width
       iws = j
       iwe = j + self % kernel_size - 1
@@ -73,27 +66,21 @@ pure module subroutine forward(self, input)
   end subroutine forward

   pure module subroutine backward(self, input, gradient)
-    implicit none
     class(locally_connected2d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:)
     real, intent(in) :: gradient(:,:)
-    integer :: input_channels, input_width, output_width
     integer :: j, n, k
     integer :: iws, iwe
     real :: gdz(self % filters, self % width)
     real :: db_local(self % filters, self % width)
     real :: dw_local(self % filters, self % width, self % channels, self % kernel_size)

-    input_channels = size(input, dim=1)
-    input_width = size(input, dim=2)
-    output_width = self % width
-
-    do j = 1, output_width
+    do j = 1, self % width
       gdz(:, j) = gradient(:, j) * self % activation % eval_prime(self % z(:, j))
     end do

     do n = 1, self % filters
-      do j = 1, output_width
+      do j = 1, self % width
         db_local(n, j) = gdz(n, j)
       end do
     end do
@@ -102,7 +89,7 @@ pure module subroutine backward(self, input, gradient)
     self % gradient = 0.0

     do n = 1, self % filters
-      do j = 1, output_width
+      do j = 1, self % width
         iws = j
         iwe = j + self % kernel_size - 1
         do k = 1, self % channels

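One detail worth noting while reading the two forward kernels: conv1d computes a strided window clipped at the input's right edge, while the locally connected layer slides a unit-stride window over full positions only. A small self-contained illustration of the two indexing schemes (this demo program is mine, not part of the commit):

program window_demo
  ! Illustrative only: reproduces the iws/iwe window arithmetic from the
  ! conv1d and locally_connected2d forward passes above.
  implicit none
  integer, parameter :: input_width = 10, kernel_size = 3, stride = 2
  integer :: j, iws, iwe

  ! conv1d-style: strided start, end clipped to the input width
  do j = 1, (input_width - kernel_size) / stride + 1
    iws = stride * (j - 1) + 1
    iwe = min(iws + kernel_size - 1, input_width)
    print '(a, i0, a, i0, a, i0)', 'conv1d j=', j, ': window ', iws, '..', iwe
  end do

  ! locally-connected-style: unit stride, full windows only
  do j = 1, input_width - kernel_size + 1
    iws = j
    iwe = j + kernel_size - 1
    print '(a, i0, a, i0, a, i0)', 'local  j=', j, ': window ', iws, '..', iwe
  end do

end program window_demo
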
src/nf/nf_maxpool1d_layer_submodule.f90

Lines changed: 0 additions & 4 deletions
@@ -5,7 +5,6 @@
 contains

   pure module function maxpool1d_layer_cons(pool_size, stride) result(res)
-    implicit none
     integer, intent(in) :: pool_size
     integer, intent(in) :: stride
     type(maxpool1d_layer) :: res
@@ -15,7 +14,6 @@ end function maxpool1d_layer_cons


   module subroutine init(self, input_shape)
-    implicit none
     class(maxpool1d_layer), intent(in out) :: self
     integer, intent(in) :: input_shape(:)

@@ -34,7 +32,6 @@ module subroutine init(self, input_shape)
   end subroutine init

   pure module subroutine forward(self, input)
-    implicit none
     class(maxpool1d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:)
     integer :: input_width
@@ -70,7 +67,6 @@ pure module subroutine forward(self, input)
   end subroutine forward

   pure module subroutine backward(self, input, gradient)
-    implicit none
     class(maxpool1d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:)
     real, intent(in) :: gradient(:,:)

src/nf/nf_maxpool2d_layer_submodule.f90

Lines changed: 0 additions & 4 deletions
@@ -5,7 +5,6 @@
 contains

   pure module function maxpool2d_layer_cons(pool_size, stride) result(res)
-    implicit none
     integer, intent(in) :: pool_size
     integer, intent(in) :: stride
     type(maxpool2d_layer) :: res
@@ -15,7 +14,6 @@ end function maxpool2d_layer_cons


   module subroutine init(self, input_shape)
-    implicit none
     class(maxpool2d_layer), intent(in out) :: self
     integer, intent(in) :: input_shape(:)

@@ -39,7 +37,6 @@ end subroutine init


   pure module subroutine forward(self, input)
-    implicit none
     class(maxpool2d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:,:)
     integer :: input_width, input_height
@@ -86,7 +83,6 @@ end subroutine forward


   pure module subroutine backward(self, input, gradient)
-    implicit none
     class(maxpool2d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:,:)
     real, intent(in) :: gradient(:,:,:)

test/CMakeLists.txt

Lines changed: 3 additions & 1 deletion
@@ -1,3 +1,5 @@
+add_library(tuff tuff.f90)
+
 foreach(execid
   input1d_layer
   input2d_layer
@@ -27,7 +29,7 @@ foreach(execid
   metrics
   )
   add_executable(test_${execid} test_${execid}.f90)
-  target_link_libraries(test_${execid} PRIVATE neural-fortran ${LIBS})
+  target_link_libraries(test_${execid} PRIVATE tuff neural-fortran ${LIBS})

   add_test(NAME test_${execid} COMMAND test_${execid})
 endforeach()

test/test_dense_layer.f90

Lines changed: 17 additions & 52 deletions
@@ -1,58 +1,23 @@
 program test_dense_layer
-  use iso_fortran_env, only: stderr => error_unit
-  use nf, only: dense, layer
-  use nf_activation, only: relu
+  use nf, only: dense, layer, relu
+  use tuff, only: test, test_result
   implicit none
-  type(layer) :: layer1, layer2
-  logical :: ok = .true.
+  type(layer) :: layer1, layer2, layer3
+  type(test_result) :: tests

   layer1 = dense(10)
-
-  if (.not. layer1 % name == 'dense') then
-    ok = .false.
-    write(stderr, '(a)') 'dense layer has its name set correctly.. failed'
-  end if
-
-  if (.not. all(layer1 % layer_shape == [10])) then
-    ok = .false.
-    write(stderr, '(a)') 'dense layer is created with requested size.. failed'
-  end if
-
-  if (layer1 % initialized) then
-    ok = .false.
-    write(stderr, '(a)') 'dense layer should not be marked as initialized yet.. failed'
-  end if
-
-  if (.not. layer1 % activation == 'sigmoid') then
-    ok = .false.
-    write(stderr, '(a)') 'dense layer is defaults to sigmoid activation.. failed'
-  end if
-
-  layer1 = dense(10, activation=relu())
-
-  if (.not. layer1 % activation == 'relu') then
-    ok = .false.
-    write(stderr, '(a)') 'dense layer is created with the specified activation.. failed'
-  end if
-
-  layer2 = dense(20)
-  call layer2 % init(layer1)
-
-  if (.not. layer2 % initialized) then
-    ok = .false.
-    write(stderr, '(a)') 'dense layer should now be marked as initialized.. failed'
-  end if
-
-  if (.not. all(layer2 % input_layer_shape == [10])) then
-    ok = .false.
-    write(stderr, '(a)') 'dense layer should have a correct input layer shape.. failed'
-  end if
-
-  if (ok) then
-    print '(a)', 'test_dense_layer: All tests passed.'
-  else
-    write(stderr, '(a)') 'test_dense_layer: One or more tests failed.'
-    stop 1
-  end if
+  layer2 = dense(10, activation=relu())
+  layer3 = dense(20)
+  call layer3 % init(layer1)
+
+  tests = test("test_dense_layer", [ &
+    test("layer name is set", layer1 % name == 'dense'), &
+    test("layer shape is correct", all(layer1 % layer_shape == [10])), &
+    test("layer is initialized", layer3 % initialized), &
+    test("layer's default activation is sigmoid", layer1 % activation == 'sigmoid'), &
+    test("user set activation works", layer2 % activation == 'relu'), &
+    test("layer initialized after init", layer3 % initialized), &
+    test("layer input shape is set after init", all(layer3 % input_layer_shape == [10])) &
+  ])

 end program test_dense_layer

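Both rewritten tests lean on a new `tuff` helper module; `tuff.f90` itself (presumably the ninth changed file) does not appear in this view. Inferred purely from the call sites — `test(name, condition)` for a single check, `test(name, results)` for aggregation, and `test(function)` for function-based tests — a minimal sketch of a compatible interface might be the following; the actual implementation in the commit is not shown here and likely differs:

module tuff
  ! Hypothetical sketch of test/tuff.f90, reconstructed from its usage
  ! in test_dense_layer and test_dense_network; not the committed source.
  use iso_fortran_env, only: stderr => error_unit
  implicit none

  private
  public :: test, test_result

  type :: test_result
    character(:), allocatable :: name
    logical :: ok = .false.
  end type test_result

  abstract interface
    type(test_result) function test_function()
      import :: test_result
    end function test_function
  end interface

  interface test
    module procedure test_condition, test_function_call, test_suite
  end interface test

contains

  ! A named check: wraps a logical condition in a test_result.
  type(test_result) function test_condition(name, condition) result(res)
    character(*), intent(in) :: name
    logical, intent(in) :: condition
    res % name = name
    res % ok = condition
  end function test_condition

  ! A function-based test: runs the function and returns its result.
  type(test_result) function test_function_call(func) result(res)
    procedure(test_function) :: func
    res = func()
  end function test_function_call

  ! A suite: aggregates results, reports failures, stops non-zero on failure.
  type(test_result) function test_suite(name, results) result(res)
    character(*), intent(in) :: name
    type(test_result), intent(in) :: results(:)
    integer :: i
    do i = 1, size(results)
      if (.not. results(i) % ok) &
        write(stderr, '(a)') name // ': ' // results(i) % name // '.. failed'
    end do
    res % name = name
    res % ok = all(results % ok)
    if (res % ok) then
      print '(a)', name // ': All tests passed.'
    else
      write(stderr, '(a)') name // ': One or more tests failed.'
      stop 1
    end if
  end function test_suite

end module tuff
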
test/test_dense_network.f90

Lines changed: 38 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -1,33 +1,40 @@
11
program test_dense_network
22
use iso_fortran_env, only: stderr => error_unit
3-
use nf, only: dense, input, network
4-
use nf_optimizers, only: sgd
3+
use nf, only: dense, input, network, sgd
4+
use tuff, only: test, test_result
55
implicit none
66
type(network) :: net
7-
logical :: ok = .true.
7+
type(test_result) :: tests
88

99
! Minimal 2-layer network
1010
net = network([ &
1111
input(1), &
1212
dense(1) &
1313
])
1414

15-
if (.not. size(net % layers) == 2) then
16-
write(stderr, '(a)') 'dense network should have 2 layers.. failed'
17-
ok = .false.
18-
end if
15+
tests = test("test_dense_network", [ &
16+
test("network has 2 layers", size(net % layers) == 2), &
17+
test("network predicts 0.5 for input 0", all(net % predict([0.]) == 0.5)), &
18+
test(simple_training), &
19+
test(larger_network_size) &
20+
])
1921

20-
if (.not. all(net % predict([0.]) == 0.5)) then
21-
write(stderr, '(a)') &
22-
'dense network should output exactly 0.5 for input 0.. failed'
23-
ok = .false.
24-
end if
22+
contains
2523

26-
training: block
24+
type(test_result) function simple_training() result(res)
2725
real :: x(1), y(1)
2826
real :: tolerance = 1e-3
2927
integer :: n
30-
integer, parameter :: num_iterations = 1000
28+
integer, parameter :: num_iterations = 1000
29+
type(network) :: net
30+
31+
res % name = 'simple training'
32+
33+
! Minimal 2-layer network
34+
net = network([ &
35+
input(1), &
36+
dense(1) &
37+
])
3138

3239
x = [0.123]
3340
y = [0.765]
@@ -39,32 +46,25 @@ program test_dense_network
3946
if (all(abs(net % predict(x) - y) < tolerance)) exit
4047
end do
4148

42-
if (.not. n <= num_iterations) then
43-
write(stderr, '(a)') &
44-
'dense network should converge in simple training.. failed'
45-
ok = .false.
46-
end if
49+
res % ok = n <= num_iterations
4750

48-
end block training
51+
end function simple_training
4952

50-
! A bit larger multi-layer network
51-
net = network([ &
52-
input(784), &
53-
dense(30), &
54-
dense(20), &
55-
dense(10) &
56-
])
53+
type(test_result) function larger_network_size() result(res)
54+
type(network) :: net
55+
56+
res % name = 'larger network training'
57+
58+
! A bit larger multi-layer network
59+
net = network([ &
60+
input(784), &
61+
dense(30), &
62+
dense(20), &
63+
dense(10) &
64+
])
5765

58-
if (.not. size(net % layers) == 4) then
59-
write(stderr, '(a)') 'dense network should have 4 layers.. failed'
60-
ok = .false.
61-
end if
66+
res % ok = size(net % layers) == 4
6267

63-
if (ok) then
64-
print '(a)', 'test_dense_network: All tests passed.'
65-
else
66-
write(stderr, '(a)') 'test_dense_network: One or more tests failed.'
67-
stop 1
68-
end if
68+
end function larger_network_size
6969

70-
end program test_dense_network
70+
end program test_dense_network

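A design note on the rewritten tests: moving the training checks into internal functions (`simple_training`, `larger_network_size`) gives each case its own local `network` rather than mutating one shared program-level variable, and the aggregate `test(...)` call now reads as a flat list of named checks instead of interleaved `if`/`write` blocks.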