-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtest_core.py
More file actions
139 lines (109 loc) · 4.62 KB
/
test_core.py
File metadata and controls
139 lines (109 loc) · 4.62 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
"""
Tests for Core KortexDL components (Network, Layer)
"""
import pytest
import numpy as np
import tempfile
import os
try:
import _kortexdl_core as bd
BINDINGS_AVAILABLE = True
except ImportError:
BINDINGS_AVAILABLE = False
bd = None
pytestmark = pytest.mark.requires_build
class TestLayer:
    """Unit tests covering the Layer binding."""

    def test_creation(self):
        """A freshly constructed Layer reports its configured dimensions."""
        lyr = bd.Layer(10, 5, bd.ActivationType.ReLU)
        assert lyr.input_size == 10
        assert lyr.output_size == 5
        assert lyr.activation == bd.ActivationType.ReLU

    def test_weights_access(self):
        """get_weights/get_biases return flat containers of the right length."""
        lyr = bd.Layer(2, 2, bd.ActivationType.Linear)
        lyr.initialize_weights()
        # Flattened 2x2 weight matrix plus one bias per output unit.
        assert len(lyr.get_weights()) == 4
        assert len(lyr.get_biases()) == 2

    def test_set_parameters(self):
        """set_parameters round-trips through get_weights/get_biases."""
        lyr = bd.Layer(2, 1, bd.ActivationType.Linear)
        # Weights are supplied row-major: one output row over two inputs.
        lyr.set_parameters([[1.5, 2.5]], [0.5])
        flat = lyr.get_weights()
        # Whatever the internal storage order, values must read back intact.
        assert flat[0] == 1.5
        assert flat[1] == 2.5
        assert lyr.get_biases()[0] == 0.5

    def test_forward_backward(self):
        """Manual forward/backward on a linear layer yields known values."""
        lyr = bd.Layer(2, 1, bd.ActivationType.Linear)
        # W = [1, 1], b = 0: the layer simply sums its two inputs.
        lyr.set_parameters([[1.0, 1.0]], [0.0])
        x = np.array([1.0, 2.0], dtype=np.float32)

        # Forward: out = 1*1 + 1*2 = 3
        out = lyr.forward(x, 1)
        np.testing.assert_almost_equal(out[0], 3.0)

        # Backward with lr=0 accumulates gradients without updating weights.
        upstream = np.array([1.0], dtype=np.float32)
        lyr.backward(x, upstream, 0.0, 1)
        grads = lyr.get_grad_weights()
        # dW = input * upstream gradient = [1, 2]
        np.testing.assert_almost_equal(grads[0], 1.0)
        np.testing.assert_almost_equal(grads[1], 2.0)
class TestNetwork:
    """Unit tests covering the Network binding."""

    def test_creation(self):
        """A topology list [in, hidden, out] yields one layer per transition."""
        net = bd.Network([10, 20, 5], bd.ActivationType.ReLU)
        stack = net.getLayersMutable()
        # Expect two layers: 10->20 and 20->5.
        assert len(stack) == 2
        assert (stack[0].input_size, stack[0].output_size) == (10, 20)
        assert (stack[1].input_size, stack[1].output_size) == (20, 5)

    def test_forward_pass(self):
        """An identity-weighted linear network passes inputs through unchanged."""
        net = bd.Network([2, 2], bd.ActivationType.Linear)
        # Configure the single 2->2 layer as the identity map.
        net.getLayersMutable()[0].set_parameters(
            [[1.0, 0.0], [0.0, 1.0]], [0.0, 0.0]
        )
        x = np.array([5.0, 10.0], dtype=np.float32)
        result = net.forward(x, 1, False)  # batch=1, inference mode
        np.testing.assert_almost_equal(result, [5.0, 10.0])

    def test_serialization(self):
        """saveModel/loadModel round-trips layer weights exactly."""
        net = bd.Network([5, 5], bd.ActivationType.ReLU)
        # delete=False so the closed file can be reopened by the C++ side.
        with tempfile.NamedTemporaryFile(delete=False) as tmp:
            path = tmp.name
        try:
            assert net.saveModel(path)
            # Loading appears to require a network of matching topology.
            restored = bd.Network([5, 5], bd.ActivationType.ReLU)
            assert restored.loadModel(path)
            # Restored weights must match the saved ones element-wise.
            np.testing.assert_allclose(
                net.getLayersMutable()[0].get_weights(),
                restored.getLayersMutable()[0].get_weights(),
            )
        finally:
            if os.path.exists(path):
                os.remove(path)