Commit fa4e2ac

committed: added neural network with 2 hidden layers

1 parent ae8a5f8 commit fa4e2ac

File tree: 1 file changed, +112 -0 lines changed

@@ -0,0 +1,112 @@
import numpy


class NeuralNetwork:

    def __init__(self, input_array, output_array):
        '''
        input_array : input values for training the neural network.
        output_array : expected output values for the given inputs.
        '''
        self.input = input_array
        # Initial weights are assigned randomly, where the first argument is the number of
        # nodes in the previous layer and the second argument is the number of nodes in the
        # next layer.

        # random initial weights for the input layer
        # self.input.shape[1] represents the number of nodes in the input layer
        # the first hidden layer consists of 4 nodes
        self.weights1 = numpy.random.rand(self.input.shape[1], 4)

        # random initial weights for the first hidden layer
        # the first hidden layer has 4 nodes
        # the second hidden layer has 3 nodes
        self.weights2 = numpy.random.rand(4, 3)

        # random initial weights for the second hidden layer
        # the second hidden layer has 3 nodes
        # the output layer has 1 node
        self.weights3 = numpy.random.rand(3, 1)

        self.y = output_array
        self.output = numpy.zeros(output_array.shape)
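        # With the 3-feature training data used in __main__, activations flow as
        # input (n, 3) -> hidden layer 1 (n, 4) -> hidden layer 2 (n, 3) -> output (n, 1),
        # where n is the number of training samples.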

    def feedforward(self):
        '''
        feedforward propagation using the sigmoid activation function between layers
        returns the last layer of the neural network
        '''
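        # Each layer computes sigmoid(previous activation . weights): a matrix product
        # followed by the element-wise sigmoid activation.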
        # layer1 connects the input nodes with the first hidden layer nodes
        self.layer1 = sigmoid(numpy.dot(self.input, self.weights1))

        # layer2 connects the first hidden layer nodes with the second hidden layer nodes
        self.layer2 = sigmoid(numpy.dot(self.layer1, self.weights2))

        # layer3 connects the second hidden layer with the output node
        self.layer3 = sigmoid(numpy.dot(self.layer2, self.weights3))

        return self.layer3

    def back_propagation(self):
        '''
        backpropagates the error through the layers using the sigmoid derivative
        and updates the weights between the layers
        '''
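        # The updates below follow the chain rule for the squared error (y - output)^2,
        # working backwards from the output layer:
        #  - the error signal at the output is 2 * (y - output) * sigmoid_derivative(output)
        #  - it is carried back to each earlier layer by multiplying with the transposed
        #    weight matrix in front of that layer and with that layer's sigmoid derivative
        #  - the dot product with a layer's transposed activations gives the update for the
        #    weights leaving that layer; the updates point along the negative gradient, so
        #    adding them to the weights performs gradient descent (learning rate of 1,
        #    summed over samples rather than averaged)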
        updated_weights3 = numpy.dot(self.layer2.T,
                                     2 * (self.y - self.output) * sigmoid_derivative(self.output))
        updated_weights2 = numpy.dot(self.layer1.T,
                                     numpy.dot(2 * (self.y - self.output) * sigmoid_derivative(self.output),
                                               self.weights3.T) * sigmoid_derivative(self.layer2))
        updated_weights1 = numpy.dot(self.input.T,
                                     numpy.dot(numpy.dot(2 * (self.y - self.output) * sigmoid_derivative(self.output),
                                                         self.weights3.T) * sigmoid_derivative(self.layer2),
                                               self.weights2.T) * sigmoid_derivative(self.layer1))

        self.weights1 += updated_weights1
        self.weights2 += updated_weights2
        self.weights3 += updated_weights3

    def train(self, output, iterations):
        '''
        output : expected output values, used for calculating the loss
        performs the feedforward and back propagation steps for the given number of iterations
        every iteration updates the weights of the neural network
        '''
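        # the printed loss is the mean squared error over all training samples, recomputed
        # with the freshly updated weights after each back propagation step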
        for iteration in range(1, iterations + 1):
            self.output = self.feedforward()
            self.back_propagation()
            print("Iteration %s " % iteration,
                  "Loss: " + str(numpy.mean(numpy.square(output - self.feedforward()))))

    def predict(self, input_array):
        '''
        predicts the output for the given input values
        '''
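        # the input is expected to have the same number of features as the training data
        # (3 values per sample for the data in __main__); the returned sigmoid output lies
        # in (0, 1) and can be thresholded (e.g. at 0.5) for a binary prediction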
        self.array = input_array
        self.layer1 = sigmoid(numpy.dot(self.array, self.weights1))
        self.layer2 = sigmoid(numpy.dot(self.layer1, self.weights2))
        self.layer3 = sigmoid(numpy.dot(self.layer2, self.weights3))
        return self.layer3


def sigmoid(value):
    '''
    applies the sigmoid activation function
    returns values squashed into the range (0, 1)
    '''
    return 1 / (1 + numpy.exp(-value))


def sigmoid_derivative(value):
    '''
    returns the derivative of the sigmoid function, where value is assumed to already be a
    sigmoid output (as is the case for the activations passed in by back_propagation);
    since d(sigmoid(x))/dx = sigmoid(x) * (1 - sigmoid(x)), this is simply value * (1 - value)
    '''
    return value * (1 - value)


if __name__ == "__main__":

    # input values
    input = numpy.array(([0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1],
                         [1, 0, 0], [1, 0, 1], [1, 1, 0], [1, 1, 1]), dtype=float)

    # true output for the given input values
    output = numpy.array(([0], [1], [1], [0],
                          [1], [0], [0], [1]), dtype=float)
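    # note: each expected output is the XOR (odd parity) of the three input bits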

    # creating the neural network
    Neural_Network = NeuralNetwork(input_array=input, output_array=output)

    # training the neural network
    Neural_Network.train(output=output, iterations=1000)
    print(Neural_Network.predict([0, 1, 1]))
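
As a quick sanity check of the trained network, one could append something like the sketch below to the __main__ block (it assumes Neural_Network, input and output from above are still in scope); it compares the thresholded predictions against the expected parity outputs:

    # sketch of a sanity check, assuming it runs inside the __main__ block above
    predictions = Neural_Network.predict(input)   # shape (8, 1), values in (0, 1)
    for features, expected, predicted in zip(input, output, predictions):
        # threshold the sigmoid output at 0.5 to get a binary decision
        print(features, "expected:", int(expected[0]), "predicted:", int(predicted[0] > 0.5))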
