-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathverify.py
More file actions
205 lines (168 loc) · 6.35 KB
/
verify.py
File metadata and controls
205 lines (168 loc) · 6.35 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
"""
Simple Verification Test
Tests the core components to ensure they work correctly.
"""
import numpy as np
import sys
import os
# Add current directory to path
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
print("=" * 80)
print("NumPy CNN Implementation - Component Verification")
print("=" * 80)
print()
# Test 1: Import all modules
print("Test 1: Importing modules...")
try:
    # Pull in every public component (alphabetical by module). A failure
    # here means the package layout is broken, so the script aborts.
    from activations import ReLU, Softmax, Sigmoid, LeakyReLU
    from data_utils import normalize, one_hot_encode
    from layers import Conv2D, MaxPool2D, Flatten, Dense, Dropout, BatchNorm2D
    from losses import CrossEntropyLoss, MeanSquaredError
    from network import NeuralNetwork
    from optimizers import SGD, Adam, RMSprop
except Exception as e:
    print(f"✗ Import failed: {e}")
    sys.exit(1)
else:
    print("✓ All modules imported successfully!")
print()
# Test 2: Conv2D forward pass
print("Test 2: Conv2D forward pass...")
try:
    # Batch of 2 single-channel 28x28 inputs; padding=1 with a 3x3 kernel
    # should preserve the spatial dimensions.
    # NOTE: `conv`, `x` and `out` are intentionally left in scope — Test 3
    # reuses them for the backward pass.
    x = np.random.randn(2, 1, 28, 28).astype(np.float32)
    conv = Conv2D(in_channels=1, out_channels=8, kernel_size=3, padding=1)
    out = conv.forward(x, training=True)
    if out.shape != (2, 8, 28, 28):
        raise AssertionError(f"Expected shape (2, 8, 28, 28), got {out.shape}")
    print(f"✓ Conv2D output shape: {out.shape}")
except Exception as e:
    print(f"✗ Conv2D test failed: {e}")
print()
# Test 3: Conv2D backward pass
print("Test 3: Conv2D backward pass...")
try:
    # Reuses `conv`, `x` and `out` bound by Test 2; if Test 2 failed, the
    # NameError is caught below and reported as a backward-test failure.
    grad_out = np.random.randn(*out.shape).astype(np.float32)
    grad_in = conv.backward(grad_out)
    assert grad_in.shape == x.shape, f"Expected gradient shape {x.shape}, got {grad_in.shape}"
    # Both parameter gradients must have been populated by backward().
    for grad_value, missing_msg in (
        (conv.grad_weights, "Weights gradient not computed"),
        (conv.grad_bias, "Bias gradient not computed"),
    ):
        assert grad_value is not None, missing_msg
    print(f"✓ Conv2D gradients computed correctly")
except Exception as e:
    print(f"✗ Conv2D backward test failed: {e}")
print()
# Test 4: MaxPool forward and backward
print("Test 4: MaxPool forward and backward...")
try:
    # 2x2 pooling halves each spatial dimension: 28x28 -> 14x14.
    pool_input = np.random.randn(2, 8, 28, 28).astype(np.float32)
    pool = MaxPool2D(pool_size=2)
    pooled = pool.forward(pool_input, training=True)
    if pooled.shape != (2, 8, 14, 14):
        raise AssertionError(f"Expected shape (2, 8, 14, 14), got {pooled.shape}")
    # Backward must route gradients back to the input's full shape.
    upstream = np.random.randn(*pooled.shape).astype(np.float32)
    downstream = pool.backward(upstream)
    assert downstream.shape == pool_input.shape
    print(f"✓ MaxPool works correctly")
except Exception as e:
    print(f"✗ MaxPool test failed: {e}")
print()
# Test 5: Dense layer
print("Test 5: Dense layer...")
try:
    # Fully-connected 784 -> 10 on a batch of 32.
    batch = np.random.randn(32, 784).astype(np.float32)
    fc = Dense(in_features=784, out_features=10)
    logits = fc.forward(batch, training=True)
    assert logits.shape == (32, 10)
    # Gradient w.r.t. the input must match the input's shape.
    upstream_grad = np.random.randn(*logits.shape).astype(np.float32)
    input_grad = fc.backward(upstream_grad)
    assert input_grad.shape == batch.shape
    print(f"✓ Dense layer works correctly")
except Exception as e:
    print(f"✗ Dense test failed: {e}")
print()
# Test 6: Activation functions
print("Test 6: Activation functions...")
try:
    # ReLU: negatives clamp to zero, non-negatives pass through.
    relu = ReLU()
    relu_in = np.array([-1, 0, 1, 2], dtype=np.float32)
    relu_out = relu.forward(relu_in)
    expected = np.array([0, 0, 1, 2], dtype=np.float32)
    assert np.allclose(relu_out, expected), f"ReLU failed: {relu_out} vs {expected}"
    print(f"✓ ReLU works correctly")
    # Softmax: each row must be a probability distribution (sums to 1).
    softmax = Softmax()
    softmax_in = np.array([[1, 2, 3], [1, 2, 3]], dtype=np.float32)
    softmax_out = softmax.forward(softmax_in)
    assert np.allclose(np.sum(softmax_out, axis=1), 1.0), "Softmax doesn't sum to 1"
    print(f"✓ Softmax works correctly")
except Exception as e:
    print(f"✗ Activation test failed: {e}")
print()
# Test 7: Loss functions
print("Test 7: Loss functions...")
try:
    criterion = CrossEntropyLoss()
    probs = np.array([[0.1, 0.2, 0.7], [0.8, 0.1, 0.1]], dtype=np.float32)
    labels = np.array([2, 0])
    # Forward must yield a positive scalar for non-degenerate predictions.
    loss = criterion.forward(probs, labels)
    assert isinstance(loss, (float, np.floating)), "Loss should be a scalar"
    assert loss > 0, "Loss should be positive"
    # Backward must return one gradient entry per prediction.
    loss_grad = criterion.backward(probs, labels)
    assert loss_grad.shape == probs.shape
    print(f"✓ CrossEntropyLoss works correctly (loss: {loss:.4f})")
except Exception as e:
    print(f"✗ Loss function test failed: {e}")
print()
# Test 8: Optimizers
print("Test 8: Optimizers...")
try:
    optimizer = Adam(learning_rate=0.001)
    params = {'weights': np.random.randn(10, 10).astype(np.float32)}
    grads = {'weights': np.random.randn(10, 10).astype(np.float32)}
    # Snapshot before the step so the in-place update is observable.
    snapshot = params['weights'].copy()
    optimizer.update(params, grads)
    moved = not np.allclose(params['weights'], snapshot)
    assert moved, "Parameters should be updated"
    print(f"✓ Adam optimizer works correctly")
except Exception as e:
    print(f"✗ Optimizer test failed: {e}")
print()
# Test 9: Network class
print("Test 9: Network class...")
try:
    # Minimal conv net: 28x28x1 -> conv(16) -> pool -> 14*14*16 -> 10 classes.
    # NOTE: `model` is intentionally left in scope — Test 10 trains it.
    model = NeuralNetwork()
    layer_stack = (
        Conv2D(1, 16, kernel_size=3, padding=1),
        ReLU(),
        MaxPool2D(pool_size=2),
        Flatten(),
        Dense(14 * 14 * 16, 10),
        Softmax(),
    )
    for layer in layer_stack:
        model.add(layer)
    model.compile(loss=CrossEntropyLoss(), optimizer=Adam())
    # Test forward pass
    net_input = np.random.randn(4, 1, 28, 28).astype(np.float32)
    net_output = model.forward(net_input, training=False)
    assert net_output.shape == (4, 10)
    print(f"✓ Network class works correctly")
except Exception as e:
    print(f"✗ Network test failed: {e}")
print()
# Test 10: Mini training loop
print("Test 10: Mini training loop...")
try:
    # Create tiny dataset (16 random images, random labels 0-9) and run a
    # short fit on the `model` built in Test 9.
    train_images = np.random.randn(16, 1, 28, 28).astype(np.float32)
    train_labels = np.random.randint(0, 10, size=16)
    # Train for 2 epochs
    model.fit(train_images, train_labels, epochs=2, batch_size=8, verbose=0)
    # Predict
    predictions = model.predict(train_images[:4])
    assert predictions.shape == (4, 10)
    print(f"✓ Training loop works correctly")
except Exception as e:
    print(f"✗ Training loop test failed: {e}")
print()
print("=" * 80)
print("All core components verified successfully! ✓")
print("=" * 80)
print()
print("The NumPy CNN implementation is working correctly.")
print("You can now:")
print(" - Explore the code in each module")
print(" - Read the comprehensive README.md")
print(" - Try running train_example.py for a full training example")
print(" - Experiment with different architectures")
print()