4 changes: 2 additions & 2 deletions src/nf/nf_activation.f90
@@ -733,7 +733,7 @@ pure function eval_3d_celu_prime(self, x) result(res)
end function eval_3d_celu_prime

! Utility Functions
- function get_activation_by_name(activation_name) result(res)
+ pure function get_activation_by_name(activation_name) result(res)
character(len=*), intent(in) :: activation_name
class(activation_function), allocatable :: res

@@ -815,4 +815,4 @@ pure function get_name(self) result(name)
end select
end function get_name

- end module nf_activation
\ No newline at end of file
+ end module nf_activation
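
Making get_activation_by_name pure means it can now be referenced from other pure procedures. A minimal sketch of such a call site, assuming only the public names visible in this diff (the wrapper function and its fallback-to-relu behavior are illustrative, not part of this change):

    ! Illustrative pure wrapper: builds an activation from a name,
    ! falling back to ReLU when the name is empty.
    pure function activation_or_default(name) result(act)
      use nf_activation, only: activation_function, get_activation_by_name, relu
      character(len=*), intent(in) :: name
      class(activation_function), allocatable :: act
      if (len_trim(name) > 0) then
        act = get_activation_by_name(name)
      else
        act = relu()
      end if
    end function activation_or_default

Before this change, a caller like this would not compile, because a pure procedure may only reference other pure procedures.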
9 changes: 0 additions & 9 deletions src/nf/nf_dense_layer.f90
@@ -98,15 +98,6 @@ module subroutine get_gradients_ptr(self, dw_ptr, db_ptr)
real, pointer, intent(out) :: db_ptr(:)
end subroutine get_gradients_ptr

- module subroutine set_params(self, params)
-   !! Set the parameters of this layer.
-   !! The parameters are ordered as weights first, biases second.
-   class(dense_layer), intent(in out) :: self
-     !! Dense layer instance
-   real, intent(in), target :: params(:)
-     !! Parameters of this layer
- end subroutine set_params
-
module subroutine init(self, input_shape)
!! Initialize the layer data structures.
!!
1 change: 0 additions & 1 deletion src/nf/nf_dense_layer_submodule.f90
@@ -1,7 +1,6 @@
submodule(nf_dense_layer) nf_dense_layer_submodule

use nf_activation, only: activation_function
- use nf_base_layer, only: base_layer
use nf_random, only: random_normal

implicit none
1 change: 0 additions & 1 deletion src/nf/nf_embedding_layer_submodule.f90
@@ -3,7 +3,6 @@
#define ABSOLUTE 2

submodule(nf_embedding_layer) nf_embedding_layer_submodule
- use nf_base_layer, only: base_layer
implicit none
contains
module function embedding_layer_cons(vocab_size, model_dimension, positional) result(res)
1 change: 0 additions & 1 deletion src/nf/nf_flatten_layer_submodule.f90
@@ -4,7 +4,6 @@
!! It is used internally by the layer type.
!! It is not intended to be used directly by the user.

- use nf_base_layer, only: base_layer

implicit none

3 changes: 1 addition & 2 deletions src/nf/nf_layer_constructors_submodule.f90
@@ -1,6 +1,5 @@
submodule(nf_layer_constructors) nf_layer_constructors_submodule

- use nf_layer, only: layer
use nf_conv1d_layer, only: conv1d_layer
use nf_conv2d_layer, only: conv2d_layer
use nf_dense_layer, only: dense_layer
@@ -18,7 +17,7 @@
use nf_self_attention_layer, only: self_attention_layer
use nf_embedding_layer, only: embedding_layer
use nf_layernorm_layer, only: layernorm_layer
- use nf_activation, only: activation_function, relu, sigmoid
+ use nf_activation, only: relu, sigmoid

implicit none

5 changes: 2 additions & 3 deletions src/nf/nf_multihead_attention_layer_submodule.f90
@@ -1,6 +1,5 @@
submodule(nf_multihead_attention_layer) nf_multihead_attention_layer_submodule
use nf_activation, only: softmax
- use nf_base_layer, only: base_layer
use nf_linear2d_layer, only: linear2d_layer

implicit none
@@ -288,7 +287,7 @@ module subroutine init_base(self, input_shape)
self % model_dimension = input_shape(2)

if (mod(self % model_dimension, self % n_heads) /= 0) then
- write(stderr, '(a)'), 'Number of heads must be divisible by model dimension'
+ write(stderr, '(a)') 'Model dimension must be divisible by number of heads'
error stop
end if
self % head_size = self % model_dimension / self % n_heads
@@ -335,4 +334,4 @@ module subroutine init_base(self, input_shape)
allocate(self % jacobian, mold=self % d_sdpa)
allocate(self % d_normalize, mold=self % attention_matrix)
end subroutine init_base
- end submodule nf_multihead_attention_layer_submodule
\ No newline at end of file
+ end submodule nf_multihead_attention_layer_submodule
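
Note that the added line also fixes the error message, which had the divisibility condition backwards: the check is mod(model_dimension, n_heads) /= 0, so it is the model dimension that must be divisible by the number of heads. Separately, the dropped comma is a portability fix, not just style: standard Fortran does not allow a comma between the io-control list and the output list. That is,

    write(stderr, '(a)') 'message'    ! standard-conforming
    write(stderr, '(a)'), 'message'   ! legacy extension

The second form is a legacy extension that some compilers accept only with a warning and stricter compilers reject.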
51 changes: 50 additions & 1 deletion src/nf/nf_optimizers.f90
@@ -18,6 +18,7 @@ module nf_optimizers
type, abstract :: optimizer_base_type
real :: learning_rate = 0.01
contains
+ procedure :: get_name
procedure(init), deferred :: init
procedure(minimize), deferred :: minimize
end type optimizer_base_type
@@ -312,4 +313,52 @@ pure subroutine minimize_adagrad(self, param, gradient)

end subroutine minimize_adagrad

- end module nf_optimizers
+
+ ! Utility Functions
+
+ !! Returns the default optimizer corresponding to the provided name
+ pure function get_optimizer_by_name(optimizer_name) result(res)
+   character(len=*), intent(in) :: optimizer_name
+   class(optimizer_base_type), allocatable :: res
+
+   select case(trim(optimizer_name))
+     case('adagrad')
+       allocate ( res, source = adagrad() )
+
+     case('adam')
+       allocate ( res, source = adam() )
+
+     case('rmsprop')
+       allocate ( res, source = rmsprop() )
+
+     case('sgd')
+       allocate ( res, source = sgd() )
+
+     case default
+       error stop 'optimizer_name must be one of: ' // &
+         '"adagrad", "adam", "rmsprop", "sgd".'
+   end select
+
+ end function get_optimizer_by_name
+
+
+ !! Returns the name of the optimizer
+ pure function get_name(self) result(name)
+   class(optimizer_base_type), intent(in) :: self
+   character(:), allocatable :: name
+
+   select type (self)
+     class is (adagrad)
+       name = 'adagrad'
+     class is (adam)
+       name = 'adam'
+     class is (rmsprop)
+       name = 'rmsprop'
+     class is (sgd)
+       name = 'sgd'
+     class default
+       error stop 'Unknown optimizer type.'
+   end select
+
+ end function get_name
+
+ end module nf_optimizers
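
A minimal sketch of how the two new helpers round-trip, assuming get_optimizer_by_name and optimizer_base_type are exported from nf_optimizers (the demo program is illustrative, not part of this change):

    program optimizer_demo
      use nf_optimizers, only: optimizer_base_type, get_optimizer_by_name
      implicit none
      class(optimizer_base_type), allocatable :: opt
      ! Name -> optimizer instance -> name
      opt = get_optimizer_by_name('adam')
      print '(a)', opt % get_name()   ! prints: adam
    end program optimizer_demo

Because get_name is bound to optimizer_base_type and resolved with a select type construct rather than dispatched through a deferred binding, adding a new optimizer to the hierarchy also requires adding a branch to that construct.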