import numpy as np
from abc import abstractmethod, ABC


class Activation(ABC):
    """Base class for element-wise activation functions."""

    @abstractmethod
    def forward(self, X: np.ndarray) -> np.ndarray:
        """Apply the activation element-wise to X."""

    @abstractmethod
    def backward(self, X: np.ndarray) -> np.ndarray:
        """Return the element-wise derivative of the activation at X."""


class Relu(Activation):
    def forward(self, X: np.ndarray) -> np.ndarray:
        return np.maximum(0, X)

    def backward(self, X: np.ndarray) -> np.ndarray:
        return np.where(X > 0, 1, 0)


class TanH(Activation):
    def forward(self, X: np.ndarray) -> np.ndarray:
        return np.tanh(X)

    def backward(self, X: np.ndarray) -> np.ndarray:
        return 1 - self.forward(X) ** 2


class Sigmoid(Activation):
    def forward(self, X: np.ndarray) -> np.ndarray:
        return 1.0 / (1.0 + np.exp(-X))

    def backward(self, X: np.ndarray) -> np.ndarray:
        s = self.forward(X)
        return s * (1 - s)  # Derivative of the sigmoid: sigma(X) * (1 - sigma(X))


class SoftMax(Activation):
    def forward(self, X: np.ndarray) -> np.ndarray:
        ax = 1 if X.ndim > 1 else 0
        exps = np.exp(
            X - np.max(X, axis=ax, keepdims=True)
        )  # Avoid numerical instability
        return exps / np.sum(exps, axis=ax, keepdims=True)

    def backward(self, X: np.ndarray) -> np.ndarray:
        # Common simplification: assume softmax is paired with a cross-entropy
        # loss that computes the combined gradient (softmax(X) - y) itself, so
        # the local derivative here is treated as 1 and the gradient passes
        # through unchanged.
        return np.ones_like(X)
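

# A minimal usage sketch (not part of the original module): it assumes the
# forward/backward pairs above are applied element-wise to a small input batch,
# and checks the Sigmoid derivative numerically with a central finite difference.
if __name__ == "__main__":
    X = np.array([[-1.5, 0.0, 2.0],
                  [0.5, -0.2, 1.0]])

    # Print activations and their local derivatives for each class.
    for act in (Relu(), TanH(), Sigmoid(), SoftMax()):
        name = act.__class__.__name__
        print(name, "forward:\n", act.forward(X))
        print(name, "backward:\n", act.backward(X))

    # Finite-difference check: (f(x + h) - f(x - h)) / (2h) should match backward(x).
    sig = Sigmoid()
    h = 1e-5
    numeric = (sig.forward(X + h) - sig.forward(X - h)) / (2 * h)
    assert np.allclose(numeric, sig.backward(X), atol=1e-6)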