Skip to content

Commit d9b4070

Browse files
committed
Integrated new features into environment
1 parent 1db0258 commit d9b4070

13 files changed

+1047
-0
lines changed

CMakeLists.txt

+8
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,12 @@ include(cmake/compilers.cmake)
1717
add_library(neural-fortran
1818
src/nf.f90
1919
src/nf/nf_activation.f90
20+
src/nf/nf_avgpool1d_layer.f90
21+
src/nf/nf_avgpool1d_layer_submodule.f90
22+
src/nf/nf_avgpool2d_layer.f90
23+
src/nf/nf_avgpool2d_layer_submodule.f90
24+
src/nf/nf_avgpool3d_layer.f90
25+
src/nf/nf_avgpool3d_layer_submodule.f90
2026
src/nf/nf_base_layer.f90
2127
src/nf/nf_conv1d_layer.f90
2228
src/nf/nf_conv1d_layer_submodule.f90
@@ -55,6 +61,8 @@ add_library(neural-fortran
5561
src/nf/nf_maxpool1d_layer_submodule.f90
5662
src/nf/nf_maxpool2d_layer.f90
5763
src/nf/nf_maxpool2d_layer_submodule.f90
64+
src/nf/nf_maxpool3d_layer.f90
65+
src/nf/nf_maxpool3d_layer_submodule.f90
5866
src/nf/nf_metrics.f90
5967
src/nf/nf_multihead_attention.f90
6068
src/nf/nf_multihead_attention_submodule.f90

src/nf.f90

+4
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,9 @@ module nf
33
use nf_datasets_mnist, only: label_digits, load_mnist
44
use nf_layer, only: layer
55
use nf_layer_constructors, only: &
6+
avgpool1d, &
7+
avgpool2d, &
8+
avgpool3d, &
69
conv1d, &
710
conv2d, &
811
dense, &
@@ -15,6 +18,7 @@ module nf
1518
locally_connected1d, &
1619
maxpool1d, &
1720
maxpool2d, &
21+
maxpool3d, &
1822
reshape, &
1923
self_attention
2024
use nf_loss, only: mse, quadratic

src/nf/nf_avgpool1d_layer.f90

+66
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
module nf_avgpool1d_layer
  !! This module provides the 1-d average pooling layer type and the
  !! interfaces of its type-bound procedures; implementations live in
  !! the companion submodule.

  use nf_base_layer, only: base_layer
  implicit none

  private
  public :: avgpool1d_layer

  type, extends(base_layer) :: avgpool1d_layer
    integer :: channels
    integer :: width ! Input extent along the pooled dimension
    integer :: pool_size
    integer :: stride

    ! Gradient with respect to the input (same shape as the input).
    real, allocatable :: gradient(:,:)
    ! Pooled result, shaped (channels, pooled width).
    real, allocatable :: output(:,:)
  contains
    procedure :: init
    procedure :: forward
    procedure :: backward
  end type avgpool1d_layer

  interface avgpool1d_layer
    pure module function avgpool1d_layer_cons(pool_size, stride) result(res)
      !! `avgpool1d` constructor function.
      integer, intent(in) :: pool_size
        !! Width of the pooling window.
      integer, intent(in) :: stride
        !! Step between consecutive pooling windows.
      type(avgpool1d_layer) :: res
    end function avgpool1d_layer_cons
  end interface avgpool1d_layer

  interface

    module subroutine init(self, input_shape)
      !! Initialize the `avgpool1d` layer instance with an input shape.
      class(avgpool1d_layer), intent(in out) :: self
        !! `avgpool1d_layer` instance.
      integer, intent(in) :: input_shape(:)
        !! Array shape of the input layer, expected as (channels, width).
    end subroutine init

    pure module subroutine forward(self, input)
      !! Run a forward pass of the `avgpool1d` layer.
      class(avgpool1d_layer), intent(in out) :: self
        !! `avgpool1d_layer` instance.
      real, intent(in) :: input(:,:)
        !! Output of the previous layer, shaped (channels, width).
    end subroutine forward

    pure module subroutine backward(self, input, gradient)
      !! Run a backward pass of the `avgpool1d` layer.
      class(avgpool1d_layer), intent(in out) :: self
        !! `avgpool1d_layer` instance.
      real, intent(in) :: input(:,:)
        !! Output of the previous layer (forward-pass input).
      real, intent(in) :: gradient(:,:)
        !! Gradient from the downstream layer, shaped (channels, pooled width).
    end subroutine backward

  end interface

end module nf_avgpool1d_layer
+87
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,87 @@
submodule(nf_avgpool1d_layer) nf_avgpool1d_layer_submodule
  implicit none

contains

  pure module function avgpool1d_layer_cons(pool_size, stride) result(res)
    !! Construct an `avgpool1d_layer` with the given window size and stride.
    integer, intent(in) :: pool_size
    integer, intent(in) :: stride
    type(avgpool1d_layer) :: res

    res % pool_size = pool_size
    res % stride = stride
  end function avgpool1d_layer_cons


  module subroutine init(self, input_shape)
    !! Set the layer dimensions from the input shape and allocate the
    !! output and gradient buffers.
    class(avgpool1d_layer), intent(in out) :: self
    integer, intent(in) :: input_shape(:)
    ! input_shape is expected to be (channels, width)

    self % channels = input_shape(1)
    ! One output element per full stride of the input (integer division
    ! drops any incomplete trailing region).
    self % width = input_shape(2) / self % stride

    ! Gradient buffer matches the input dimensions.
    allocate(self % gradient(input_shape(1), input_shape(2)))
    self % gradient = 0

    ! Output buffer holds the pooled result.
    allocate(self % output(self % channels, self % width))
    self % output = 0
  end subroutine init


  pure module subroutine forward(self, input)
    !! Average-pool `input` into `self % output`.
    class(avgpool1d_layer), intent(in out) :: self
    real, intent(in) :: input(:,:)
    integer :: input_width
    integer :: i, n
    integer :: ii, iend
    integer :: iextent

    input_width = size(input, dim=2)
    ! Process only positions whose window start lies within a complete stride.
    iextent = input_width - mod(input_width, self % stride)

    ! Iterations are independent: each writes a distinct output element.
    do concurrent (i = 1:iextent: self % stride, n = 1:self % channels)
      ! Index of this window in the pooled (output) array.
      ii = (i - 1) / self % stride + 1
      ! Window end, clipped to the input boundary.
      iend = min(i + self % pool_size - 1, input_width)

      ! Average over the (possibly clipped) window.
      self % output(n, ii) = sum(input(n, i:iend)) / (iend - i + 1)
    end do
  end subroutine forward


  pure module subroutine backward(self, input, gradient)
    !! Distribute the downstream `gradient` evenly over each pooling
    !! window, accumulating into `self % gradient`.
    class(avgpool1d_layer), intent(in out) :: self
    real, intent(in) :: input(:,:)
    real, intent(in) :: gradient(:,:)
    integer :: channels, pooled_width
    integer :: i, n, istart, iend
    real :: scale_factor

    channels = size(gradient, dim=1)
    pooled_width = size(gradient, dim=2)

    ! Reset first, then accumulate: when pool_size > stride the windows
    ! overlap and each input element must receive the SUM of the
    ! contributions from every window that covers it. (Plain assignment
    ! would let later windows clobber earlier contributions.)
    self % gradient = 0

    do n = 1, channels
      do i = 1, pooled_width
        istart = (i - 1) * self % stride + 1
        iend = min(istart + self % pool_size - 1, size(input, dim=2))
        ! Each element of the window received 1/window_size weight forward.
        scale_factor = 1.0 / (iend - istart + 1)

        self % gradient(n, istart:iend) = &
          self % gradient(n, istart:iend) + gradient(n, i) * scale_factor
      end do
    end do
  end subroutine backward

end submodule nf_avgpool1d_layer_submodule

src/nf/nf_avgpool2d_layer.f90

+66
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
module nf_avgpool2d_layer
  !! This module provides the 2-d average pooling layer.

  use nf_base_layer, only: base_layer
  implicit none

  private
  public :: avgpool2d_layer

  type, extends(base_layer) :: avgpool2d_layer
    integer :: channels
    integer :: height ! Pooled extent along the third input dimension
    integer :: width ! Pooled extent along the second input dimension
    integer :: pool_size ! Edge length of the square pooling window
    integer :: stride ! Step between windows, same along both dimensions

    ! Gradient for the input (same shape as the input: channels, width, height).
    real, allocatable :: gradient(:,:,:)
    ! Output after pooling (dimensions: (channels, pooled width, pooled height)).
    real, allocatable :: output(:,:,:)
  contains
    procedure :: init
    procedure :: forward
    procedure :: backward
  end type avgpool2d_layer

  interface avgpool2d_layer
    pure module function avgpool2d_layer_cons(pool_size, stride) result(res)
      !! `avgpool2d` constructor function.
      integer, intent(in) :: pool_size
        !! Edge length of the square pooling window.
      integer, intent(in) :: stride
        !! Stride of the pooling window (applied to both spatial dimensions).
      type(avgpool2d_layer) :: res
    end function avgpool2d_layer_cons
  end interface avgpool2d_layer

  interface
    module subroutine init(self, input_shape)
      !! Initialize the `avgpool2d` layer instance with an input shape.
      class(avgpool2d_layer), intent(in out) :: self
        !! `avgpool2d_layer` instance.
      integer, intent(in) :: input_shape(:)
        !! Array shape of the input layer, expected as (channels, width, height).
    end subroutine init

    pure module subroutine forward(self, input)
      !! Run a forward pass of the `avgpool2d` layer.
      class(avgpool2d_layer), intent(in out) :: self
        !! `avgpool2d_layer` instance.
      real, intent(in) :: input(:,:,:)
        !! Input data (output of the previous layer), with shape (channels, width, height).
    end subroutine forward

    pure module subroutine backward(self, input, gradient)
      !! Run a backward pass of the `avgpool2d` layer.
      class(avgpool2d_layer), intent(in out) :: self
        !! `avgpool2d_layer` instance.
      real, intent(in) :: input(:,:,:)
        !! Input data (output of the previous layer).
      real, intent(in) :: gradient(:,:,:)
        !! Gradient from the downstream layer, with shape (channels, pooled width, pooled height).
    end subroutine backward
  end interface

end module nf_avgpool2d_layer
+94
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,94 @@
submodule(nf_avgpool2d_layer) nf_avgpool2d_layer_submodule
  implicit none

contains

  pure module function avgpool2d_layer_cons(pool_size, stride) result(res)
    !! Construct an `avgpool2d_layer` with the given window size and stride.
    integer, intent(in) :: pool_size
    integer, intent(in) :: stride
    type(avgpool2d_layer) :: res

    res % pool_size = pool_size
    res % stride = stride
  end function avgpool2d_layer_cons


  module subroutine init(self, input_shape)
    !! Set the layer dimensions from the input shape and allocate the
    !! output and gradient buffers.
    class(avgpool2d_layer), intent(in out) :: self
    integer, intent(in) :: input_shape(:)
    ! input_shape is expected to be (channels, width, height)

    self % channels = input_shape(1)
    ! One output element per full stride along each spatial dimension.
    self % width = input_shape(2) / self % stride
    self % height = input_shape(3) / self % stride

    ! Gradient buffer matches the input dimensions.
    allocate(self % gradient(input_shape(1), input_shape(2), input_shape(3)))
    self % gradient = 0

    ! Output buffer holds the pooled result.
    allocate(self % output(self % channels, self % width, self % height))
    self % output = 0
  end subroutine init


  pure module subroutine forward(self, input)
    !! Average-pool `input` into `self % output`.
    class(avgpool2d_layer), intent(in out) :: self
    real, intent(in) :: input(:,:,:)
    integer :: input_width, input_height
    integer :: i, j, n
    integer :: ii, jj, iend, jend
    integer :: iextent, jextent

    input_width = size(input, dim=2)
    input_height = size(input, dim=3)

    ! Process only positions whose window start lies within a complete stride.
    iextent = input_width - mod(input_width, self % stride)
    jextent = input_height - mod(input_height, self % stride)

    ! Iterations are independent: each writes a distinct output element.
    do concurrent (i = 1:iextent:self % stride, j = 1:jextent:self % stride, n = 1:self % channels)
      ! Indices of this window in the pooled (output) array.
      ii = (i - 1) / self % stride + 1
      jj = (j - 1) / self % stride + 1

      ! Window ends, clipped to the input boundary.
      iend = min(i + self % pool_size - 1, input_width)
      jend = min(j + self % pool_size - 1, input_height)

      ! Average over the (possibly clipped) window.
      self % output(n, ii, jj) = sum(input(n, i:iend, j:jend)) / ((iend - i + 1) * (jend - j + 1))
    end do
  end subroutine forward


  pure module subroutine backward(self, input, gradient)
    !! Distribute the downstream `gradient` evenly over each pooling
    !! window, accumulating into `self % gradient`.
    class(avgpool2d_layer), intent(in out) :: self
    real, intent(in) :: input(:,:,:)
    real, intent(in) :: gradient(:,:,:)
    integer :: channels, pooled_width, pooled_height
    integer :: i, j, n, istart, iend, jstart, jend
    real :: scale_factor

    channels = size(gradient, dim=1)
    pooled_width = size(gradient, dim=2)
    pooled_height = size(gradient, dim=3)

    ! Reset first, then accumulate: when pool_size > stride the windows
    ! overlap and each input element must receive the SUM of the
    ! contributions from every window that covers it. (Plain assignment
    ! would let later windows clobber earlier contributions.)
    self % gradient = 0

    do j = 1, pooled_height
      jstart = (j - 1) * self % stride + 1
      jend = min(jstart + self % pool_size - 1, size(input, dim=3))
      do i = 1, pooled_width
        istart = (i - 1) * self % stride + 1
        iend = min(istart + self % pool_size - 1, size(input, dim=2))
        ! Each element of the window received 1/window_area weight forward.
        scale_factor = 1.0 / ((iend - istart + 1) * (jend - jstart + 1))

        do n = 1, channels
          self % gradient(n, istart:iend, jstart:jend) = &
            self % gradient(n, istart:iend, jstart:jend) + gradient(n, i, j) * scale_factor
        end do
      end do
    end do
  end subroutine backward

end submodule nf_avgpool2d_layer_submodule

0 commit comments

Comments
 (0)