import numpy as np
import random  # only needed for the commented-out pure-Python weight initialization below


def mat_mult(A, B):
    # Pure-Python matrix product of two nested lists (a numpy-free fallback to np.dot).
    return [
        [sum(A[i][m] * B[m][j] for m in range(len(A[0]))) for j in range(len(B[0]))]
        for i in range(len(A))
    ]
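
# Illustrative sanity check (an added sketch, not part of the original file):
# the pure-Python product should agree with numpy's np.dot for small matrices.
# The _example_* names are invented for this check only.
_example_A = [[1, 2], [3, 4]]
_example_B = [[5, 6], [7, 8]]
assert mat_mult(_example_A, _example_B) == np.dot(_example_A, _example_B).tolist()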


class Neural_Network(object):
    # inspired by https://enlight.nyc/projects/neural-network/

    def __init__(self, W1=None, W2=None):
        # parameters
        self.inputSize = 3
        self.outputSize = 2
        self.hiddenSize = 3
        self.fitness = 0

        # weights
        if W1 is not None:
            self.W1 = W1
        else:
            # 3x3 weights from the input layer to the hidden layer
            self.W1 = np.random.randn(self.inputSize, self.hiddenSize)

        if W2 is not None:
            self.W2 = W2
        else:
            # 3x2 weights from the hidden layer to the output layer
            self.W2 = np.random.randn(self.hiddenSize, self.outputSize)

        # Pure-Python alternative to the numpy initialization above:
        # self.w1 = [[random.random() for i in range(self.hiddenSize)] for i in range(self.inputSize)]
        # self.w2 = [[random.random() for i in range(self.outputSize)] for i in range(self.hiddenSize)]

    def predict(self, X):
        # forward propagation through our network
        self.z = np.dot(X, self.W1)  # dot product of X (input) and the 3x3 input-to-hidden weights
        self.z2 = self.sigmoid(self.z)  # activation function
        self.z3 = np.dot(self.z2, self.W2)  # dot product of hidden layer (z2) and the 3x2 hidden-to-output weights
        o = self.sigmoid(self.z3)  # final activation function

        # Pure-Python alternative using mat_mult and the commented-out w1/w2 lists:
        # self.z = mat_mult(X, self.w1)
        # self.z2 = self.sigmoid(self.z)
        # self.z3 = mat_mult(self.z2, self.w2)
        # o = self.sigmoid(self.z3)

        return o

    def sigmoid(self, s):
        # activation function: a sigmoid shifted down by 0.5, so outputs lie in (-0.5, 0.5)
        return 1 / (1 + np.exp(-s)) - 0.5
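

# --- Usage sketch (added for illustration; not part of the original file) ---
# Assumes inputs are numpy arrays of shape (n_samples, inputSize). The names
# below (nn, X, fixed_W1, fixed_W2, nn_fixed) are invented for this example.
if __name__ == "__main__":
    nn = Neural_Network()             # random 3x3 and 3x2 weights
    X = np.array([[0.1, 0.5, -0.3]])  # one sample with 3 input features
    print(nn.predict(X))              # shape (1, 2), values in (-0.5, 0.5)

    # Weights can also be injected explicitly, e.g. to reuse weights produced elsewhere.
    fixed_W1 = np.zeros((3, 3))
    fixed_W2 = np.zeros((3, 2))
    nn_fixed = Neural_Network(W1=fixed_W1, W2=fixed_W2)
    print(nn_fixed.predict(X))        # all-zero weights give exactly 0.0 outputs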