dnn1.py
#%%
import numpy as np
def sigmoid(x):
    # Sigmoid activation: maps each element into (0, 1).
    return 1 / (1 + np.exp(-x))

def softmax(x):
    # Softmax: turns scores into probabilities that sum to 1.
    # Subtracting the maximum first avoids overflow in np.exp.
    exp_x = np.exp(x - np.max(x))
    sum_exp_x = np.sum(exp_x)
    y = exp_x / sum_exp_x
    return y

def identity_function(x):
    # Identity activation for the output layer.
    return x

def init_network():
    # Fixed weights and biases for a 3-layer network with 3 units per layer.
    network = {}
    network['W1'] = np.array([[0.1, 0.3, 0.5], [0.2, 0.4, 0.6], [0.3, 0.5, 0.7]])
    network['b1'] = np.array([0.1, 0.2, 0.3])
    network['W2'] = np.array([[0.1, 0.4, 0.5], [0.2, 0.5, 0.7], [0.3, 0.6, 0.8]])
    network['b2'] = np.array([0.1, 0.2, 0.3])
    network['W3'] = np.array([[0.1, 0.3, 0.4], [0.2, 0.4, 0.5], [0.3, 0.5, 0.6]])
    network['b3'] = np.array([0.1, 0.2, 0.3])
    return network

def forward(network, x):
    # Forward propagation: three affine transforms, the first two followed
    # by a sigmoid, the last passed through the identity output layer.
    W1, W2, W3 = network['W1'], network['W2'], network['W3']
    b1, b2, b3 = network['b1'], network['b2'], network['b3']
    a1 = np.dot(x, W1) + b1
    z1 = sigmoid(a1)
    a2 = np.dot(z1, W2) + b2
    z2 = sigmoid(a2)
    a3 = np.dot(z2, W3) + b3
    # y = softmax(a3)  # softmax is applied outside forward() instead
    y = identity_function(a3)
    return y
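
#%%
# Illustration (not part of the original script): what a single layer computes.
# With x of shape (3,) and W1 of shape (3, 3), np.dot(x, W1) gives a 3-vector
# whose j-th entry is sum_i x[i] * W1[i][j]; adding b1 is element-wise. The
# values below simply reuse W1 and b1 from init_network().
_net = init_network()
_x = np.array([1.0, 0.5, 0.3])
_a1 = np.dot(_x, _net['W1']) + _net['b1']
print(_a1)            # first-layer pre-activation
print(sigmoid(_a1))   # after the sigmoid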

network = init_network()
x = np.array([1.0, 0.5, 0.3])
y1 = forward(network, x)
print(y1)
y2 = softmax(y1)
print(y2)
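
#%%
# A minimal sketch (not in the original script): running the same forward pass
# on a batch of inputs. np.dot and broadcasting handle 2-D arrays, so forward()
# accepts one input vector per row. The scalar softmax() above normalises over
# the whole array, so a batch needs row-wise normalisation, as sketched here.
x_batch = np.array([[1.0, 0.5, 0.3],
                    [0.2, 0.9, 0.4]])
y_batch = forward(network, x_batch)
print(y_batch)  # one output row per input row
exp_y = np.exp(y_batch - np.max(y_batch, axis=1, keepdims=True))
probs = exp_y / np.sum(exp_y, axis=1, keepdims=True)
print(probs)    # each row sums to 1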