gihakkk committed
Commit 0873620 · verified · 1 Parent(s): 66d4dbb

Upload MLP.py

Files changed (1): MLP.py +69 -0
MLP.py ADDED
@@ -0,0 +1,69 @@
import numpy as np

# Hyperparameter settings
input_size = 2       # number of input-layer nodes
hidden_size = 3      # number of hidden-layer nodes
output_size = 1      # number of output-layer nodes
learning_rate = 0.5  # learning rate
epochs = 10000       # number of training iterations
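# The network is 2 -> 3 -> 1. XOR is not linearly separable, so at least one
# hidden layer is required; three hidden units are comfortably enough here.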

# 1. Define the dataset (the XOR problem)
# Input data: [0,0], [0,1], [1,0], [1,1]
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
# Target labels: [0], [1], [1], [0]
y = np.array([[0], [1], [1], [0]])
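# Shapes: X is (4, 2), y is (4, 1); the whole dataset is treated as one batch,
# so every forward pass below operates on all four samples at once.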

# 2. Initialize the weights (W) and biases (b)
W1 = np.random.randn(input_size, hidden_size)
b1 = np.random.randn(hidden_size)
W2 = np.random.randn(hidden_size, output_size)
b2 = np.random.randn(output_size)
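# Random initialization breaks symmetry between hidden units; with identical
# starting weights, every hidden node would receive the same gradient.
# Shapes: W1 (2, 3), b1 (3,), W2 (3, 1), b2 (1,).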

# 3. Define the activation function and its derivative
def sigmoid(x):
    return 1 / (1 + np.exp(-x))

def sigmoid_derivative(x):
    return x * (1 - x)
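# Note: sigmoid_derivative expects x to already be a sigmoid *output* s, since
# d/dz sigmoid(z) = s * (1 - s); below it is only ever applied to activations
# (predicted_output, hidden_activation), never to the pre-activation sums.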

# 4. Training loop
for epoch in range(epochs):
    # Forward propagation
    # Hidden layer
    hidden_output = np.dot(X, W1) + b1
    hidden_activation = sigmoid(hidden_output)

    # Output layer
    output_output = np.dot(hidden_activation, W2) + b2
    predicted_output = sigmoid(output_output)
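    # Per pass, hidden_activation has shape (4, 3) and predicted_output (4, 1),
    # one row per training sample.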

    # Backpropagation
    # Step 1: compute the output-layer error and its delta
    error_output = y - predicted_output
    delta_output = error_output * sigmoid_derivative(predicted_output)
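    # For a squared-error loss L = 0.5 * (y - pred)^2, the gradient w.r.t. the
    # output pre-activation is -(y - pred) * pred * (1 - pred) = -delta_output.
    # Keeping the positive sign here is why the parameter updates below use +=;
    # the net effect is ordinary gradient descent on the squared error.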

    # Step 2: compute the hidden-layer error and its delta
    error_hidden = np.dot(delta_output, W2.T)
    delta_hidden = error_hidden * sigmoid_derivative(hidden_activation)
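    # Chain rule: the output delta flows back through W2, so each hidden unit
    # is credited with the output error in proportion to its outgoing weight.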

    # Step 3: update the weights and biases
    W2 += np.dot(hidden_activation.T, delta_output) * learning_rate
    b2 += np.sum(delta_output, axis=0) * learning_rate
    W1 += np.dot(X.T, delta_hidden) * learning_rate
    b1 += np.sum(delta_hidden, axis=0) * learning_rate
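    # These are full-batch updates: the dot products and axis-0 sums aggregate
    # the deltas over all four samples before scaling by the learning rate.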

    # Print the loss every 1000 epochs
    if epoch % 1000 == 0:
        loss = np.mean(np.abs(error_output))
        print(f"Epoch: {epoch}, Loss: {loss:.4f}")

print("\n--- Training complete ---")

# 5. Predict with the trained model (check the results)
hidden_output_final = np.dot(X, W1) + b1
hidden_activation_final = sigmoid(hidden_output_final)
predicted_final = sigmoid(np.dot(hidden_activation_final, W2) + b2)

print("Input data:\n", X)
print("Predicted results:\n", predicted_final.round())
print("Target labels:\n", y)