Commit f6a88e9

add more code
1 parent a3338d9 commit f6a88e9

7 files changed: +22 -3 lines changed

src/tensor_array/activation.py

Lines changed: 12 additions & 0 deletions
@@ -0,0 +1,12 @@
+from tensor_array.core import Tensor
+from tensor_array.core import zeros
+
+def relu(input):
+    tensor_zeros = zeros(shape = input.shape(), dtype = input.dtype())
+    return (input > tensor_zeros).condition(input, tensor_zeros)
+
+def sigmoid(input):
+    return input.sigmoid()
+
+def softmax(input, dim = 0):
+    return input
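
A note on this new module: relu is implemented as a compare-and-select (condition picks elementwise from input where it is positive and from tensor_zeros elsewhere), but softmax is still a stub that returns its input unchanged, even though the attention code below already calls it. As a reference for the intended behavior, a minimal plain-Python sketch of a numerically stable softmax (the name softmax_reference is illustrative, not part of tensor_array):

import math

def softmax_reference(values):
    # Subtract the max before exponentiating so large inputs cannot overflow.
    peak = max(values)
    exps = [math.exp(v - peak) for v in values]
    total = sum(exps)
    return [e / total for e in exps]

print(softmax_reference([1.0, 2.0, 3.0]))  # ~[0.090, 0.245, 0.665]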
707 KB
Binary file not shown.

src/tensor_array/layers/attention/attention.py

Lines changed: 2 additions & 1 deletion
@@ -2,10 +2,11 @@
 from .. import Layer
 from ..util import Linear
 from tensor_array.core import Tensor
+from tensor_array.activation import softmax

 def scaled_dot_product_attention(q, k, v, mask = None):
     attn_scores = q @ k.transpose(len(k.shape()) - 2, len(k.shape()) - 1)
-    attn_probs = SoftMax(attn_scores, len(attn_scores.shape()) - 1)
+    attn_probs = softmax(attn_scores, len(attn_scores.shape()) - 1)
     return attn_probs @ v

 class MultiheadAttention(Layer):
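
This change replaces the previously undefined SoftMax name with the new softmax helper. For comparison, the textbook formulation is attention(Q, K, V) = softmax(Q K^T / sqrt(d_k)) V; the version above does not yet scale by 1/sqrt(d_k) or use its mask argument. A self-contained NumPy sketch of the full form (sdpa_reference is an illustration, not tensor_array code):

import numpy as np

def sdpa_reference(q, k, v, mask=None):
    # Textbook scaled dot-product attention: softmax(Q K^T / sqrt(d_k)) V.
    d_k = q.shape[-1]
    scores = q @ np.swapaxes(k, -2, -1) / np.sqrt(d_k)
    if mask is not None:
        scores = np.where(mask, scores, -1e9)  # suppress masked positions
    scores -= scores.max(axis=-1, keepdims=True)  # numerical stability
    probs = np.exp(scores)
    probs /= probs.sum(axis=-1, keepdims=True)
    return probs @ v

q = k = v = np.random.rand(2, 4, 8)   # (batch, seq_len, d_k)
print(sdpa_reference(q, k, v).shape)  # (2, 4, 8)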

src/tensor_array/layers/attention/transformer.py

Lines changed: 2 additions & 1 deletion
@@ -1,6 +1,7 @@
 from typing import Any
 from .. import Layer
 from .attention import MultiheadAttention
+from tensor_array.activation import relu
 from ..util import Sequential
 from ..util import Linear
 from ..util import Activation
@@ -9,7 +10,7 @@ class TransformerEncoderImpl(Layer):
     def __init__(self, d_model, n_head, ff_size) -> None:
         self.feed_forward = Sequential([
             Linear(ff_size),
-            Activation(ReLU),
+            Activation(relu),
             Linear(d_model)
         ])
         self.multihead_attn = MultiheadAttention(d_model, n_head)
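
The corrected Activation(relu) line fixes another undefined name (ReLU). The surrounding Sequential is the Transformer's position-wise feed-forward block, FFN(x) = relu(x W1 + b1) W2 + b2: expand to ff_size, apply relu, project back to d_model. A small NumPy sketch of that computation (the weight names w1, b1, w2, b2 are illustrative):

import numpy as np

def feed_forward_reference(x, w1, b1, w2, b2):
    # Linear(ff_size) -> relu -> Linear(d_model), applied per position.
    hidden = np.maximum(x @ w1 + b1, 0.0)  # relu
    return hidden @ w2 + b2

d_model, ff_size = 8, 32
x = np.random.rand(4, d_model)  # (seq_len, d_model)
w1, b1 = np.random.rand(d_model, ff_size), np.zeros(ff_size)
w2, b2 = np.random.rand(ff_size, d_model), np.zeros(d_model)
print(feed_forward_reference(x, w1, b1, w2, b2).shape)  # (4, 8)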
(file path not shown)

Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+from tensor_array.layers.normalization import Normalization
(file path not shown)

Lines changed: 4 additions & 0 deletions

@@ -0,0 +1,4 @@
+from .. import Layer
+
+class Normalization(Layer):
+    pass
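
The new Normalization class is an empty Layer subclass for now. If it is headed toward layer normalization (an assumption; the commit gives no body beyond pass), the core computation would look roughly like this NumPy sketch:

import numpy as np

def layer_norm_reference(x, eps=1e-5):
    # Normalize the last axis to zero mean and unit variance;
    # a learnable gain and bias would usually follow.
    mean = x.mean(axis=-1, keepdims=True)
    var = x.var(axis=-1, keepdims=True)
    return (x - mean) / np.sqrt(var + eps)

print(layer_norm_reference(np.random.rand(4, 8)).shape)  # (4, 8)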
