activation_enum.py
class Activations(Activation, Enum):
RELU = Relu()
TANH = Tanh()
SIGMOID = Sigmoid()
SOFTMAX = Softmax()
base_activation.py
class Activation:
def activate(self, array: np.ndarray):
print("in activation")
pass
def derivative(self, array: np.ndarray):
pass
relu.py
class Relu(Activation):
def activate(self, array: np.ndarray):
print('in relu')
return np.maximum(0, array)
def derivative(self, array: np.ndarray):
return np.where(array <= 0, 0, 1)
主.py
activ1 = Relu().activate(array=np.array([11, -11])) # output: in relu
activ2 = Activations.RELU.activate(array=np.array([11, -11])) # output: in activation
activ2 输出为“激活中”,而所需输出为“relu 中”。
谢谢!
您必须调用枚举的值才能正确使用这些方法。例如:
activ2 = Activations.RELU.value.activate(array=np.array([11, -11]))
这将按预期调用
Relu
类的 activate 方法。