NN.py
# Created by M. Krouwel
# based on work by Milo Spencer-Harper https://github.com/miloharper/simple-neural-network
# Class for creating and using a self-made NN model for Connect 4
# Functions: train the model, predict based on the model, save the model, load the model
from typing import Any, List
import warnings
import numpy as np


class NN:
    __synaptic_weights: np.ndarray[Any, np.dtype[np.floating[Any]]]
    __numberOfInputs: int
    __numberOfOutputs: int

    def __init__(self, numberOfInputs: int, numberOfOutputs: int):
        self.__numberOfInputs = numberOfInputs
        self.__numberOfOutputs = numberOfOutputs
        # Seed the random number generator so every run starts from the same weights.
        np.random.seed(1)
        # Initialise the weights as an (inputs x outputs) matrix with values in [-1, 1).
        self.__synaptic_weights = 2 * np.random.random((numberOfInputs, numberOfOutputs)) - 1

    @staticmethod
    def __sigmoid(x):
        # Suppress the overflow warnings np.exp can raise for large inputs.
        warnings.filterwarnings('ignore')
        return 1 / (1 + np.exp(-x))

    @staticmethod
    def __sigmoid_derivative(x):
        # x is already a sigmoid output, so its derivative simplifies to x * (1 - x).
        return x * (1 - x)
    def train(self, dataset, iterations: int, reportEvery: int):
        # Each dataset entry holds the expected move at index 0 and the board inputs at index 1.
        inputs: List[Any] = []
        output = np.zeros((len(dataset), self.__numberOfOutputs))
        for i in range(len(dataset)):
            inputs.append(dataset[i][1])
            output[i][dataset[i][0]] = 1  # one-hot encode the expected output
        X = np.array(inputs).reshape((-1, self.__numberOfInputs))
        for i in range(iterations):
            # Pass the training set through the neural network (a single layer of neurons).
            NNguess = self.__think(X)
            # Calculate the error (the difference between the desired output and the predicted output).
            error = output - NNguess
            # Multiply the error by the input and again by the gradient of the sigmoid curve.
            # Less confident weights are adjusted more; inputs that are zero do not change the weights.
            adjustment = np.dot(X.T, error * self.__sigmoid_derivative(NNguess))
            # Adjust the weights.
            self.__synaptic_weights += adjustment
            if i % reportEvery == 0:
                print(i)
    def __think(self, data):
        # Forward pass: reshape the input and apply the sigmoid to the weighted sum.
        return self.__sigmoid(np.dot(np.array(data).reshape(-1, self.__numberOfInputs), self.__synaptic_weights))

    def predict(self, data, index):
        # Return the network's confidence for the output at the given index.
        return self.__think(data)[0][index]

    def save(self, path: str):
        np.savetxt(path, self.__synaptic_weights, delimiter=',')

    def load(self, path: str):
        self.__synaptic_weights = np.loadtxt(path, delimiter=',')
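
# Usage sketch (not part of the original file): a minimal example of the train / predict /
# save / load functions listed in the header, assuming a Connect 4 board flattened to 42 inputs
# and one output per column (7 columns). The dataset format follows how train() reads each
# entry: (expected column index, board vector). The board encodings and the file name
# 'weights.csv' below are illustrative assumptions, not part of the original model.
if __name__ == '__main__':
    dataset = [
        (3, [0] * 42),        # hypothetical position: empty board -> play the centre column
        (0, [1] + [0] * 41),  # hypothetical position -> play column 0
    ]
    nn = NN(numberOfInputs=42, numberOfOutputs=7)
    nn.train(dataset, iterations=1000, reportEvery=100)
    print(nn.predict([0] * 42, 3))  # confidence that column 3 is the move for the empty board
    nn.save('weights.csv')
    nn.load('weights.csv')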