
Commit 35d39b6

embedding_layer: add more comments
1 parent 2f53ddf

2 files changed: +9 −0 lines


src/nf/nf_embedding_layer.f90

Lines changed: 1 addition & 0 deletions
@@ -58,6 +58,7 @@ pure module subroutine backward(self, input, gradient)
     end subroutine backward
 
     pure module subroutine positional_encoding(self, pos)
+      !! Sum embedding with positional info (trigonometric, not trainable)
       class(embedding_layer), intent(in out) :: self
       integer, intent(in) :: pos
     end subroutine positional_encoding
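
The interface above only declares positional_encoding; the trigonometric encoding itself lives in the implementation submodule, which this commit does not touch. As a hedged sketch of what the new comment describes, the standard sinusoidal scheme (sin/cos pairs at geometrically spaced frequencies) can be computed as below; the standalone program form, the array shape, and the 10000 base come from the usual formulation, not from this repository:

program positional_encoding_demo
  !! Hedged sketch of a trigonometric (non-trainable) positional encoding.
  !! Shapes and names are illustrative; the layer itself would add this
  !! table onto its embedding output ("sum embedding with positional info").
  implicit none
  integer, parameter :: sequence_length = 7, model_dimension = 8
  real :: encoding(sequence_length, model_dimension)
  integer :: pos, i
  real :: theta

  do pos = 1, sequence_length
    do i = 1, model_dimension / 2
      ! theta = pos / 10000**(2i / d_model); sin and cos fill odd-even pairs
      theta = (pos - 1) / 10000. ** (real(2 * (i - 1)) / model_dimension)
      encoding(pos, 2*i - 1) = sin(theta)
      encoding(pos, 2*i)     = cos(theta)
    end do
  end do

  print '(8f8.4)', (encoding(pos, :), pos = 1, sequence_length)
end program positional_encoding_demo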

src/nf/nf_layer_constructors.f90

Lines changed: 8 additions & 0 deletions
@@ -196,6 +196,14 @@ module function linear2d(out_features) result(res)
     end function linear2d
 
     module function embedding(sequence_length, vocab_size, model_dimension) result(res)
+      !! Embedding layer constructor.
+      !!
+      !! This layer feeds token indices from the dictionary into the network.
+      !! It works as a trainable lookup table that converts each index into a vector.
+      !! The embedding layer must be the first layer in the network.
+      !! `sequence_length`: maximum length of the input sequence
+      !! `vocab_size`: length of the token vocabulary
+      !! `model_dimension`: size of the target embeddings
       integer, intent(in) :: sequence_length, vocab_size, model_dimension
       type(layer) :: res
     end function embedding
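
A hedged usage sketch follows from the doc comment: the embedding layer sits first and the rest of the network stacks behind it. Assuming the `embedding` constructor is exported from the top-level `nf` module alongside `network` and `linear2d` (only its interface appears in this diff, so the import is an assumption), a network might be built like this, with all argument values illustrative:

program embedding_usage
  ! Hedged sketch; assumes `embedding` is exported from the `nf` module
  ! like the other layer constructors shown in this file.
  use nf, only: network, embedding, linear2d
  implicit none
  type(network) :: net

  ! The embedding layer must come first: it maps each of up to 7 token
  ! indices from a 100-token vocabulary to a trainable 32-vector.
  net = network([ &
    embedding(sequence_length=7, vocab_size=100, model_dimension=32), &
    linear2d(out_features=32) &
  ])
end program embedding_usage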
