diff --git a/school_project/test/models/cpu/utils/test_model.py b/school_project/test/models/cpu/utils/test_model.py
index e69de29..f0d2156 100644
--- a/school_project/test/models/cpu/utils/test_model.py
+++ b/school_project/test/models/cpu/utils/test_model.py
@@ -0,0 +1,119 @@
+import unittest
+
+# Test the XOR implementation of Model for its lower computation time
+from school_project.models.cpu.xor import XORModel
+
+class TestModel(unittest.TestCase):
+    """Unit tests for the model module."""
+    def __init__(self, *args, **kwargs) -> None:
+        """Initialise unit tests and inputs."""
+        super(TestModel, self).__init__(*args, **kwargs)
+
+    def test_train_dataset_size(self) -> None:
+        """Test that the size of the training dataset matches the chosen value."""
+        train_dataset_size = 4
+        model = XORModel(hidden_layers_shape = [100, 100],
+                         train_dataset_size = train_dataset_size,
+                         learning_rate = 0.1,
+                         use_relu = True)
+        model.create_model_values()
+        model.train(epoch_count=1)
+        self.assertEqual(first=model.layers.head.input.shape[1],
+                         second=train_dataset_size)
+
+    def test_network_shape(self) -> None:
+        """Test the neuron count of each layer to match the set shape of the
+        network."""
+        layers_shape = [2, 100, 100, 1]
+        model = XORModel(hidden_layers_shape = [100, 100],
+                         train_dataset_size = 4,
+                         learning_rate = 0.1,
+                         use_relu = True)
+        model.create_model_values()
+        model.train(epoch_count=1)
+        for count, layer in enumerate(model.layers):
+            self.assertEqual(first=layer.input_neuron_count,
+                             second=layers_shape[count])
+
+    def test_learning_rates(self) -> None:
+        """Test the learning rate of each layer to be the same."""
+        learning_rate = 0.1
+        model = XORModel(hidden_layers_shape = [100, 100],
+                         train_dataset_size = 4,
+                         learning_rate = learning_rate,
+                         use_relu = True)
+        model.create_model_values()
+        model.train(epoch_count=1)
+        for layer in model.layers:
+            self.assertEqual(first=layer.learning_rate, second=learning_rate)
+
+    def test_relu_model_transfer_types(self) -> None:
+        """Test the transfer type of each layer to match what is set."""
+        transfer_types = ['relu', 'relu', 'sigmoid']
+        model = XORModel(hidden_layers_shape = [100, 100],
+                         train_dataset_size = 4,
+                         learning_rate = 0.1,
+                         use_relu = True)
+        model.create_model_values()
+        model.train(epoch_count=1)
+        for count, layer in enumerate(model.layers):
+            self.assertEqual(first=layer.transfer_type,
+                             second=transfer_types[count])
+
+    def test_sigmoid_model_transfer_types(self) -> None:
+        """Test the transfer type of each layer to match what is set."""
+        transfer_types = ['sigmoid', 'sigmoid', 'sigmoid']
+        model = XORModel(hidden_layers_shape = [100, 100],
+                         train_dataset_size = 4,
+                         learning_rate = 0.1,
+                         use_relu = False)
+        model.create_model_values()
+        model.train(epoch_count=1)
+        for count, layer in enumerate(model.layers):
+            self.assertEqual(first=layer.transfer_type,
+                             second=transfer_types[count])
+
+    def test_weight_matrice_shapes(self) -> None:
+        """Test that each layer's weight matrix has the same number of columns
+        as the layer's input matrix has rows, for the matrix
+        multiplication."""
+        model = XORModel(hidden_layers_shape = [100, 100],
+                         train_dataset_size = 4,
+                         learning_rate = 0.1,
+                         use_relu = True)
+        model.create_model_values()
+        model.train(epoch_count=1)
+        for layer in model.layers:
+            self.assertEqual(first=layer.weights.shape[1],
+                             second=layer.input.shape[0])
+
+    def test_bias_matrice_shapes(self) -> None:
+        """Test that each layer's bias matrix has the same number of rows
+        as the result of the layer's weights and input multiplication, for
+        element-wise addition of the biases."""
+        model = XORModel(hidden_layers_shape = [100, 100],
+                         train_dataset_size = 4,
+                         learning_rate = 0.1,
+                         use_relu = True)
+        model.create_model_values()
+        model.train(epoch_count=1)
+        for layer in model.layers:
+            self.assertEqual(first=layer.biases.shape[0],
+                             second=layer.weights.shape[0])
+
+    def test_layer_output_shapes(self) -> None:
+        """Test the shape of each layer's activation function's output."""
+        model = XORModel(hidden_layers_shape = [100, 100],
+                         train_dataset_size = 4,
+                         learning_rate = 0.1,
+                         use_relu = True)
+        model.create_model_values()
+        model.train(epoch_count=1)
+        for layer in model.layers:
+            self.assertEqual(
+                first=(layer.weights.shape[0], layer.input.shape[1]),
+                second=layer.output.shape
+            )
+
+if __name__ == '__main__':
+    unittest.main()
\ No newline at end of file
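The three shape tests above (weights, biases, layer output) all encode the same column-per-sample layout for a fully connected layer. As a reference, the sketch below restates the asserted relationships in plain NumPy; it is not taken from the project's Layer class, and the names n_in, n_out and m are illustrative only.

    import numpy as np

    # Hypothetical layer mapping n_in neurons to n_out neurons, with a batch of
    # m training samples stored one per column (the layout the assertions imply).
    n_in, n_out, m = 2, 100, 4

    weights = np.random.randn(n_out, n_in)   # weights.shape[1] == input.shape[0]
    layer_input = np.random.randn(n_in, m)   # one column per training sample
    biases = np.zeros((n_out, 1))            # biases.shape[0] == weights.shape[0]

    z = weights @ layer_input + biases       # biases broadcast across the batch axis
    output = np.maximum(0, z)                # e.g. a ReLU transfer

    assert weights.shape[1] == layer_input.shape[0]
    assert biases.shape[0] == weights.shape[0]
    assert output.shape == (weights.shape[0], layer_input.shape[1])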
diff --git a/school_project/test/models/cpu/utils/test_tools.py b/school_project/test/models/cpu/utils/test_tools.py
index 1603d6f..e982063 100644
--- a/school_project/test/models/cpu/utils/test_tools.py
+++ b/school_project/test/models/cpu/utils/test_tools.py
@@ -1,37 +1,26 @@
 import unittest
+
 from school_project.models.cpu.utils import tools
 
 
 class TestTools(unittest.TestCase):
-    def __init__(self, *args, **kwargs):
+    """Unit tests for the tools module."""
+    def __init__(self, *args, **kwargs) -> None:
+        """Initialise unit tests."""
         super(TestTools, self).__init__(*args, **kwargs)
-    def test_sigmoid(self) -> None:
-        """Test sigmoid output range to be within 0-1.
-
-        Raises:
-            AssertionError: if sigmoid output range is not within 0-1.
-
-        """
-        test_inputs = [-100,0,100]
-        for test_input in test_inputs:
-            output = tools.sigmoid(test_input)
-            self.assertTrue(output >= 0 and output <= 1,
-                            "Sigmoid should return a number between 0 and 1")
-
     def test_relu(self) -> None:
-        """Test ReLu output range to be >=0.
-
-        Raises:
-            AssertionError: if relu output range is not >=0.
-
-        """
-        test_inputs = [-100,0,100]
+        """Test ReLU output range to be >= 0."""
+        test_inputs = [-100, 0, 100]
+        for test_input in test_inputs:
+            output = tools.relu(z=test_input)
+            self.assertGreaterEqual(a=output, b=0)
+
+    def test_sigmoid(self) -> None:
+        """Test sigmoid output range to be within 0-1."""
+        test_inputs = [-100, 0, 100]
         for test_input in test_inputs:
-            output = tools.relu(test_input)
-            self.assertTrue(
-                output >= 0,
-                "ReLu should return a number greater than or equal to 0"
-            )
+            output = tools.sigmoid(z=test_input)
+            self.assertTrue(expr=output >= 0 and output <= 1)
 
 if __name__ == '__main__':
     unittest.main()
\ No newline at end of file
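Neither tools module appears in this diff, so as a reminder of the properties the two range tests rely on, the textbook definitions are sketched below under that assumption (this is not the project's implementation): sigmoid maps any finite input into [0, 1], and ReLU never returns a negative number.

    import numpy as np

    def sigmoid(z):
        # 1 / (1 + e^-z) lies strictly between 0 and 1 for any finite z
        return 1 / (1 + np.exp(-z))

    def relu(z):
        # max(0, z) is never negative
        return np.maximum(0, z)

    for z in (-100, 0, 100):
        assert 0 <= sigmoid(z) <= 1
        assert relu(z) >= 0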
- - """ - test_inputs = [-100,0,100] + """Test ReLu output range to be >=0.""" + test_inputs = [-100, 0, 100] + for test_input in test_inputs: + output = tools.relu(z=test_input) + self.assertGreaterEqual(a=output, b=0) + + def test_sigmoid(self) -> None: + """Test sigmoid output range to be within 0-1.""" + test_inputs = [-100, 0, 100] for test_input in test_inputs: - output = tools.relu(test_input) - self.assertTrue( - output >= 0, - "ReLu should return a number greater than or equal to 0" - ) + output = tools.sigmoid(z=test_input) + self.assertTrue(expr=output >= 0 and output <= 1) if __name__ == '__main__': unittest.main() \ No newline at end of file diff --git a/school_project/test/models/gpu/utils/test_model.py b/school_project/test/models/gpu/utils/test_model.py index e69de29..688364b 100644 --- a/school_project/test/models/gpu/utils/test_model.py +++ b/school_project/test/models/gpu/utils/test_model.py @@ -0,0 +1,119 @@ +import unittest + + +# Test XOR implementation of Model for its lesser computation time +from school_project.models.gpu.xor import XORModel + +class TestModel(unittest.TestCase): + """Unit tests for model module.""" + def __init__(self, *args, **kwargs) -> None: + """Initialise unit tests and inputs.""" + super(TestModel, self).__init__(*args, **kwargs) + + def test_train_dataset_size(self) -> None: + """Test the size of training dataset to be value chosen.""" + train_dataset_size = 4 + model = XORModel(hidden_layers_shape = [100, 100], + train_dataset_size = train_dataset_size, + learning_rate = 0.1, + use_relu = True) + model.create_model_values() + model.train(epoch_count=1) + self.assertEqual(first=model.layers.head.input.shape[1], + second=train_dataset_size) + + def test_network_shape(self) -> None: + """Test the neuron count of each layer to match the set shape of the + network.""" + layers_shape = [2, 100, 100, 1] + model = XORModel(hidden_layers_shape = [100, 100], + train_dataset_size = 4, + learning_rate = 0.1, + use_relu = True) + model.create_model_values() + model.train(epoch_count=1) + for count, layer in enumerate(model.layers): + self.assertEqual(first=layer.input_neuron_count, + second=layers_shape[count]) + + def test_learning_rates(self) -> None: + """Test learning rate of each layer to be the same.""" + learning_rate = 0.1 + model = XORModel(hidden_layers_shape = [100, 100], + train_dataset_size = 4, + learning_rate = learning_rate, + use_relu = True) + model.create_model_values() + model.train(epoch_count=1) + for layer in model.layers: + self.assertEqual(first=layer.learning_rate, second=learning_rate) + + def test_relu_model_transfer_types(self) -> None: + """Test transfer type of each layer to match whats set.""" + transfer_types = ['relu', 'relu', 'sigmoid'] + model = XORModel(hidden_layers_shape = [100, 100], + train_dataset_size = 4, + learning_rate = 0.1, + use_relu = True) + model.create_model_values() + model.train(epoch_count=1) + for count, layer in enumerate(model.layers): + self.assertEqual(first=layer.transfer_type, + second=transfer_types[count]) + + def test_sigmoid_model_transfer_types(self) -> None: + """Test transfer type of each layer to match whats set.""" + transfer_types = ['sigmoid', 'sigmoid', 'sigmoid'] + model = XORModel(hidden_layers_shape = [100, 100], + train_dataset_size = 4, + learning_rate = 0.1, + use_relu = False) + model.create_model_values() + model.train(epoch_count=1) + for count, layer in enumerate(model.layers): + self.assertEqual(first=layer.transfer_type, + second=transfer_types[count]) + + def 
+    def test_weight_matrice_shapes(self) -> None:
+        """Test that each layer's weight matrix has the same number of columns
+        as the layer's input matrix has rows, for the matrix
+        multiplication."""
+        model = XORModel(hidden_layers_shape = [100, 100],
+                         train_dataset_size = 4,
+                         learning_rate = 0.1,
+                         use_relu = True)
+        model.create_model_values()
+        model.train(epoch_count=1)
+        for layer in model.layers:
+            self.assertEqual(first=layer.weights.shape[1],
+                             second=layer.input.shape[0])
+
+    def test_bias_matrice_shapes(self) -> None:
+        """Test that each layer's bias matrix has the same number of rows
+        as the result of the layer's weights and input multiplication, for
+        element-wise addition of the biases."""
+        model = XORModel(hidden_layers_shape = [100, 100],
+                         train_dataset_size = 4,
+                         learning_rate = 0.1,
+                         use_relu = True)
+        model.create_model_values()
+        model.train(epoch_count=1)
+        for layer in model.layers:
+            self.assertEqual(first=layer.biases.shape[0],
+                             second=layer.weights.shape[0])
+
+    def test_layer_output_shapes(self) -> None:
+        """Test the shape of each layer's activation function's output."""
+        model = XORModel(hidden_layers_shape = [100, 100],
+                         train_dataset_size = 4,
+                         learning_rate = 0.1,
+                         use_relu = True)
+        model.create_model_values()
+        model.train(epoch_count=1)
+        for layer in model.layers:
+            self.assertEqual(
+                first=(layer.weights.shape[0], layer.input.shape[1]),
+                second=layer.output.shape)
+
+if __name__ == '__main__':
+    unittest.main()
\ No newline at end of file
diff --git a/school_project/test/models/gpu/utils/test_tools.py b/school_project/test/models/gpu/utils/test_tools.py
index c30eb8b..b7ea1a6 100644
--- a/school_project/test/models/gpu/utils/test_tools.py
+++ b/school_project/test/models/gpu/utils/test_tools.py
@@ -1,37 +1,26 @@
 import unittest
+
 from school_project.models.gpu.utils import tools
 
 
 class TestTools(unittest.TestCase):
-    def __init__(self, *args, **kwargs):
+    """Unit tests for the tools module."""
+    def __init__(self, *args, **kwargs) -> None:
+        """Initialise unit tests and inputs."""
         super(TestTools, self).__init__(*args, **kwargs)
-
-    def test_sigmoid(self) -> None:
-        """Test sigmoid output range to be within 0-1.
-
-        Raises:
-            AssertionError: if sigmoid output range is not within 0-1.
-
-        """
-        test_inputs = [-100,0,100]
-        for test_input in test_inputs:
-            output = tools.sigmoid(test_input)
-            self.assertTrue(output >= 0 and output <= 1,
-                            "Sigmoid should return a number between 0 and 1")
     def test_relu(self) -> None:
-        """Test ReLu output range to be >=0.
-
-        Raises:
-            AssertionError: if relu output range is not >=0.
-
-        """
-        test_inputs = [-100,0,100]
+        """Test ReLU output range to be >= 0."""
+        test_inputs = [-100, 0, 100]
+        for test_input in test_inputs:
+            output = tools.relu(z=test_input)
+            self.assertGreaterEqual(a=output, b=0)
+
+    def test_sigmoid(self) -> None:
+        """Test sigmoid output range to be within 0-1."""
+        test_inputs = [-100, 0, 100]
         for test_input in test_inputs:
-            output = tools.relu(test_input)
-            self.assertTrue(
-                output >= 0,
-                "ReLu should return a number greater than or equal to 0"
-            )
+            output = tools.sigmoid(z=test_input)
+            self.assertTrue(expr=output >= 0 and output <= 1)
 
 if __name__ == '__main__':
     unittest.main()
\ No newline at end of file
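For exercising these files, standard unittest discovery should work from the repository root, assuming the school_project and test directories are packages with __init__.py (as the dotted imports above suggest); the GPU variants additionally need whatever GPU array library the models.gpu package imports. The dotted module path below comes straight from the files in this diff.

    # run every test module under school_project/test
    python -m unittest discover school_project/test

    # or run a single file, e.g. the CPU model tests
    python -m unittest school_project.test.models.cpu.utils.test_model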