-
Notifications
You must be signed in to change notification settings - Fork 0
/
xorexperi.py
88 lines (60 loc) · 2 KB
/
xorexperi.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
import numpy as np
def tanh(x):
    """Element-wise hyperbolic tangent activation (thin wrapper over numpy)."""
    return np.tanh(x)
def tanh_prime(x):
    """Derivative of tanh expressed in terms of the tanh OUTPUT x.

    If y = tanh(z) then dy/dz = 1 - y**2, so callers pass the stored
    activation (not the pre-activation) — which is how ``fit`` uses it.
    """
    return 1.0 - x * x
def NeuralNetwork(layers):
    """Initialise the module-global weight matrices for a fully connected net.

    layers -- list of layer sizes, e.g. [2, 2, 1] for a 2-2-1 XOR network.

    Weights are drawn uniformly from [-1, 1). Hidden-layer matrices carry an
    extra row AND column for the bias unit; the output matrix only carries the
    extra input row. Sets the globals ``activation``, ``activation_prime`` and
    ``weights``, and also returns ``weights`` for convenience (the original
    returned None, so this is backward compatible).
    """
    global activation, activation_prime, weights
    activation = tanh
    activation_prime = tanh_prime
    weights = []
    # Hidden layers: +1 on both dimensions so the bias unit propagates.
    for i in range(1, len(layers) - 1):
        weights.append(2 * np.random.random((layers[i - 1] + 1, layers[i] + 1)) - 1)
    # Output layer: bias on the input side only. Using layers[-2]/layers[-1]
    # also fixes a NameError the old code hit when len(layers) == 2 (the loop
    # above never ran, leaving ``i`` undefined).
    weights.append(2 * np.random.random((layers[-2] + 1, layers[-1])) - 1)
    return weights
def fit(X, y, epochs, learning_rate=0.5):
global weights, activation, activation_prime
ones = np.atleast_2d(np.ones(X.shape[0]))
print ones # for bias units
X = np.concatenate((ones.T, X), axis=1)
for k in range(epochs):
if k % 10000 == 0: print 'Iterations:', k
i = np.random.randint(X.shape[0])
a = [X[i]] # selecting a random input
for l in range(len(weights)):
dot_value = np.dot(a[l], weights[l])
_activation = activation(dot_value)
a.append(_activation)
error = y[i] - a[-1]
deltas = [error * activation_prime(a[-1])]
for l in range(len(a) - 2, 0, -1):
if(k == 0):
print l
deltas.append(deltas[-1].dot(weights[l].T)*activation_prime(a[l]))
deltas.reverse()
for i in range(len(weights)):
layer = np.atleast_2d(a[i])
delta = np.atleast_2d(deltas[i])
weights[i] += learning_rate * layer.T.dot(delta)
def predict(x):
    """Forward-propagate one input through the trained global ``weights``.

    x -- 1-D input vector WITHOUT the bias term; the leading 1 is prepended
         here to match the layout used during training.

    Returns the activation of the output layer (1-D array). The Python-2
    debug ``print`` statements were removed; ``np.ones(1).T`` was simplified
    (transpose is a no-op on a 1-D array).
    """
    a = np.concatenate((np.ones(1), np.array(x)))
    for l in range(len(weights)):
        a = activation(np.dot(a, weights[l]))
    return a
# error reduction
# variation of weights
# Script entry: build a 2-2-1 network and train it on XOR, then show the
# prediction for each of the four inputs. Converted from Python 2
# (print statements, raw_input) to Python 3; prompt typo fixed.
print("Enter number of Iterations : ")
epochs = input()
NeuralNetwork([2, 2, 1])
X = np.array([[0, 0],
              [0, 1],
              [1, 0],
              [1, 1]])
y = np.array([0, 1, 1, 0])
fit(X, y, int(epochs))
for e in X:
    print(e, predict(e))