From 7045a808cf769bff040f1f79db206a00a9d70347 Mon Sep 17 00:00:00 2001
From: Tim Koornstra
Date: Fri, 18 Aug 2023 15:21:04 +0200
Subject: [PATCH] Small refactor of VGSLModelGenerator + improved testing

test_model_creation.py also tests all the exceptions
---
 src/vgsl_model_generator.py  | 270 +++++++++++++++--------------------
 tests/test_model_creation.py | 264 +++++++++++++++++++++++++++++++++-
 2 files changed, 377 insertions(+), 157 deletions(-)

diff --git a/src/vgsl_model_generator.py b/src/vgsl_model_generator.py
index c00170ad..e297f533 100644
--- a/src/vgsl_model_generator.py
+++ b/src/vgsl_model_generator.py
@@ -124,10 +124,10 @@ def __init__(self,
                 # TODO: Add model_name argument to arg_parser.py
                 self.model_name = "custom_model"
 
-        except Exception:
-            raise ValueError("Something is wrong with the input string, "
-                             "please check the VGSL-spec formatting "
-                             "with the documentation.")
+        except (TypeError, AttributeError) as e:
+            raise ValueError("Something is wrong with the input string, "
+                             "please check the VGSL-spec formatting "
+                             "with the documentation.") from e
 
     def init_model_from_string(self,
                                vgsl_spec_string: str,
@@ -197,8 +197,12 @@ def init_model_from_string(self,
                 setattr(self, f"dropout{index}", self.dropout_generator(layer))
                 self.history.append(f"dropout{index}")
             elif layer.startswith('R'):
-                self.history.append("reshape"+str(index)+"_"+(layer))
-            elif layer.startswith('O1'):
+                prev_layer = self.inputs if not self.history else getattr(
+                    self, self.history[-1])
+                setattr(self, f"reshape{index}",
+                        self.reshape_generator(layer, prev_layer))
+                self.history.append(f"reshape{index}")
+            elif layer.startswith('O'):
                 setattr(self, f"output{index}",
                         self.get_output_layer(layer, output_classes))
                 self.history.append(f"output{index}")
@@ -220,10 +224,7 @@ def build(self) -> tf.keras.models.Model:
         logging.info("Building model for: %s", self.selected_model_vgsl_spec)
         x = self.inputs
         for index, layer in enumerate(self.history):
-            if (layer.startswith("reshape")):
-                x = self.reshape_generator(layer.split("_")[1], x)(x)
-            else:
-                x = getattr(self, layer)(x)
+            x = getattr(self, layer)(x)
         output = layers.Activation('linear', dtype=tf.float32)(x)
 
         logging.info("Model has been built\n")
@@ -359,11 +360,12 @@ def get_activation_function(nonlinearity: str) -> str:
         mapping = {'s': 'sigmoid', 't': 'tanh', 'r': 'relu',
                    'e': 'elu', 'l': 'linear', 'm': 'softmax'}
-        activation = mapping.get(nonlinearity)
-        if activation is None:
+
+        if nonlinearity not in mapping:
             raise ValueError(
-                f"Unsupported nonlinearity {nonlinearity} provided.")
-        return activation
+                f"Unsupported nonlinearity '{nonlinearity}' provided.")
+
+        return mapping[nonlinearity]
 
     def make_input_layer(self,
                          inputs: str,
@@ -446,16 +448,27 @@ def conv2d_generator(self,
 
         """
 
-        # Get activation function
-        activation = self.get_activation_function(layer[1])
-        if not activation:
-            raise ValueError(
-                f"Invalid activation function specified in {layer}")
-
         # Extract convolutional parameters
         conv_filter_params = [int(match)
                               for match in re.findall(r'\d+', layer)]
 
+        # Check if the layer format is as expected
+        if len(conv_filter_params) < 3:
+            raise ValueError(f"Conv layer {layer} has too few parameters. "
+                             "Expected format: C<x>,<y>,<d> or C<x>,<y>,"
+                             "<s_x>,<s_y>,<d>")
+        elif len(conv_filter_params) > 5:
+            raise ValueError(f"Conv layer {layer} has too many parameters. "
+                             "Expected format: C<x>,<y>,<d> or C<x>,<y>,<s_x>,"
+                             "<s_y>,<d>")
+
+        # Get activation function
+        try:
+            activation = self.get_activation_function(layer[1])
+        except ValueError:
+            raise ValueError(
+                f"Invalid activation function specified in {layer}")
+
         # Check parameter length and generate corresponding Conv2D layer
         if len(conv_filter_params) == 3:
             x, y, d = conv_filter_params
@@ -476,16 +489,6 @@ def conv2d_generator(self,
                                  activation=activation,
                                  kernel_initializer=self._initializer)
 
-        # Error handling
-        if len(conv_filter_params) < 3:
-            raise ValueError(f"Conv layer {layer} has too few parameters. "
-                             "Expected format: C<x>,<y>,<d> or C<x>,<y>,"
-                             "<s_x>,<s_y>,<d>")
-        else:
-            raise ValueError(f"Conv layer {layer} has too many parameters. "
-                             "Expected format: C<x>,<y>,<d> or C<x>,<y>,<s_x>,"
-                             "<s_y>,<d>")
-
     def maxpool_generator(self,
                           layer: str) -> tf.keras.layers.MaxPooling2D:
@@ -520,7 +523,7 @@ def maxpool_generator(self,
 
         # Extract pooling and stride parameters
         pool_stride_params = [int(match)
-                              for match in re.findall(r'\d+', layer)]
+                              for match in re.findall(r'-?\d+', layer)]
 
         # Check if the parameters are as expected
         if len(pool_stride_params) != 4:
@@ -573,13 +576,13 @@ def avgpool_generator(self,
 
         # Extract pooling and stride parameters
         pool_stride_params = [int(match)
-                              for match in re.findall(r'\d+', layer)]
+                              for match in re.findall(r'-?\d+', layer)]
 
         # Check if the parameters are as expected
         if len(pool_stride_params) != 4:
             raise ValueError(f"AvgPool layer {layer} does not have the "
                              "expected number of parameters. Expected format: "
-                             "Mp<x>,<y>,<s_x>,<s_y>")
+                             "Ap<x>,<y>,<s_x>,<s_y>")
 
         pool_x, pool_y, stride_x, stride_y = pool_stride_params
@@ -635,7 +638,7 @@ def reshape_generator(self,
 
         # Check if the layer format is as expected
-        if not layer or len(layer) < 2:
+        if len(layer) < 2:
             raise ValueError(f"Reshape layer {layer} is of unexpected format. "
                              "Expected format: Rc.")
@@ -662,7 +665,7 @@ def fc_generator(self,
             VGSL specification for the fully connected layer. Expected format:
             `F(s|t|r|l|m)<d>`
             - `(s|t|r|l|m)`: Non-linearity type. One of sigmoid, tanh, relu,
-              elu, or none.
+              linear, or softmax.
             - `<d>`: Number of outputs.
 
         Returns
         -------
@@ -695,30 +698,27 @@ def fc_generator(self,
 
         """
 
         # Ensure the layer string format is as expected
-        if not layer or len(layer) < 2:
+        if not re.match(r'^F[a-z]-?\d+$', layer):
             raise ValueError(
                 f"Dense layer {layer} is of unexpected format. Expected "
-                "format: F(s|t|r|l|m)<d>.")
-
-        activation = self.get_activation_function(layer[1])
+                "format: F(s|t|r|l|m)<d>."
+            )
 
         # Check if the activation function is valid
         # or any other supported activations
-        if activation not in ['sigmoid', 'tanh', 'relu', 'linear', 'softmax']:
-            raise ValueError(
-                f"Invalid activation {activation} for Dense layer {layer}.")
-
         try:
-            n = int(re.search(r'\d+$', layer).group())
-        except (AttributeError, ValueError):
+            activation = self.get_activation_function(layer[1])
+        except ValueError:
             raise ValueError(
-                "Failed to extract the number of neurons from Dense layer "
+                f"Invalid activation '{layer[1]}' for Dense layer "
                 f"{layer}.")
 
-        # Check if the number of neurons is valid
+        # Extract the number of neurons
+        n = int(layer[2:])
         if n <= 0:
             raise ValueError(
-                f"Invalid number of neurons {n} for Dense layer {layer}.")
+                f"Invalid number of neurons {n} for Dense layer {layer}."
+            )
 
         return layers.Dense(n,
                             activation=activation,
@@ -758,26 +758,15 @@ def lstm_generator(self,
 
         """
 
-        # Ensure the layer string format is as expected
-        if not layer or len(layer) < 2:
+        # Extract direction, summarization, and units
+        match = re.match(r'L([fr])(s?)(-?\d+)$', layer)
+        if not match:
             raise ValueError(
                 f"LSTM layer {layer} is of unexpected format. Expected "
                 "format: L(f|r)[s]<n>.")
 
-        direction = layer[1]
-
-        # Validate direction
-        if direction not in ['f', 'r']:
-            raise ValueError(
-                f"Invalid direction {direction} for LSTM layer {layer}. "
-                "Expected 'f' (forward) or 'r' (reverse).")
-
-        try:
-            n = int(re.search(r'\d+$', layer).group())
-        except (AttributeError, ValueError):
-            raise ValueError(
-                "Failed to extract the number of units from LSTM layer "
-                f"{layer}.")
+        direction, summarize, n = match.groups()
+        n = int(n)
 
         # Check if the number of units is valid
         if n <= 0:
@@ -828,26 +817,17 @@ def gru_generator(self,
 
         """
 
-        # Ensure the layer string format is as expected
-        if not layer or len(layer) < 2:
+        # Extract direction, summarization, and units
+        match = re.match(r'G([fr])(s?)(-?\d+)$', layer)
+        if not match:
             raise ValueError(
                 f"GRU layer {layer} is of unexpected format. Expected "
                 "format: G(f|r)[s]<n>.")
 
-        direction = layer[1]
-
-        # Validate direction
-        if direction not in ['f', 'r']:
-            raise ValueError(
-                f"Invalid direction {direction} for GRU layer {layer}. "
-                "Expected 'f' (forward) or 'r' (reverse).")
+        direction, summarize, n = match.groups()
 
-        try:
-            n = int(re.search(r'\d+$', layer).group())
-        except (AttributeError, ValueError):
-            raise ValueError(
-                "Failed to extract the number of units from GRU layer "
-                f"{layer}.")
+        # Convert n to integer
+        n = int(n)
 
         # Check if the number of units is valid
         if n <= 0:
@@ -856,7 +836,7 @@ def gru_generator(self,
 
         gru_params = {
             "units": n,
-            "return_sequences": 's' in layer,
+            "return_sequences": bool(summarize),
            "go_backwards": direction == 'r',
             "kernel_initializer": self._initializer
         }
@@ -904,34 +884,24 @@ def bidirectional_generator(self,
         runs it in both forward and backward directions.
 
         """
 
-        # Ensure the layer string format is as expected
-        if not layer or len(layer) < 3:
+        # Extract layer type and units
+        match = re.match(r'B([gl])(-?\d+)$', layer)
+        if not match:
             raise ValueError(f"Layer {layer} is of unexpected format. "
-                             "Expected format: B(g|l)<n> where 'l' stands for "
-                             "LSTM, 'g' stands for GRU, and 'n' is the number "
+                             "Expected format: B(g|l)<n> where 'g' stands for "
+                             "GRU, 'l' stands for LSTM, and 'n' is the number "
                              "of units.")
 
-        # Extract and validate units
-        units_match = re.search(r'\d+$', layer)
-        if not units_match:
-            raise ValueError(
-                f"Failed to extract the number of units from layer {layer}.")
+        layer_type, units = match.groups()
+        units = int(units)
 
-        units = int(units_match.group())
+        # Check if the number of units is valid
         if units <= 0:
             raise ValueError(
                 f"Invalid number of units {units} for layer {layer}.")
 
-        # Determine and validate the RNN layer type
-        layer_type = layer[1]
-        if layer_type == "l":
-            rnn_layer = layers.LSTM
-        elif layer_type == "g":
-            rnn_layer = layers.GRU
-        else:
-            raise ValueError(
-                f"Invalid RNN layer type {layer_type} in layer {layer}. "
-                "Expected 'l' for LSTM or 'g' for GRU.")
+        # Determine the RNN layer type
+        rnn_layer = layers.LSTM if layer_type == 'l' else layers.GRU
 
         rnn_params = {
             "units": units,
@@ -951,7 +921,9 @@ def residual_block_generator(self,
         ----------
         layer : str
             VGSL specification for the Residual Block.
             Expected format:
-            `RB<x>,<y>,<d>`
+            `RB[d]<x>,<y>,<d>`
+            - `[d]`: Optional downsample flag. If provided, the block will
+              downsample the input.
             - `<x>`, `<y>`: Kernel sizes in the x and y dimensions respectively.
             - `<d>`: Depth of the Conv2D layers within the Residual Block.
@@ -975,29 +947,22 @@ def residual_block_generator(self,
 
         """
 
-        # Ensure the layer string format is as expected
-        if not layer:
-            raise ValueError(f"Layer {layer} is of unexpected format.")
-
-        # Extract convolution parameters
-        conv_params = [int(match) for match in re.findall(r'\d+', layer)]
-
-        if len(conv_params) != 3:
-            raise ValueError(f"Layer {layer} is expected to contain 3 integer "
-                             "parameters: x, y, and d.")
+        match = re.match(r'RB([d]?)(-?\d+),(-?\d+),(-?\d+)$', layer)
+        if not match:
+            raise ValueError(
+                f"Layer {layer} is of unexpected format. Expected format: "
+                "RB[d]<x>,<y>,<d>.")
 
-        x, y, d = conv_params
+        downsample, x, y, d = match.groups()
+        x, y, d = map(int, (x, y, d))
 
-        # Validate the extracted parameters
+        # Validate parameters
         if any(val <= 0 for val in [x, y, d]):
-            raise ValueError(f"Invalid parameters x={x}, y={y}, d={d} in "
-                             f"layer {layer}. All values should be positive "
-                             "integers.")
-
-        # Check for downsampling
-        downsample = 'd' in layer
+            raise ValueError(
+                f"Invalid parameters x={x}, y={y}, d={d} in layer {layer}. "
+                "All values should be positive integers.")
 
-        return ResidualBlock(d, x, y, self._initializer, downsample)
+        return ResidualBlock(d, x, y, self._initializer, bool(downsample))
 
     def dropout_generator(self,
                           layer: str) -> tf.keras.layers.Dropout:
@@ -1008,8 +973,8 @@ def dropout_generator(self,
         ----------
         layer : str
             VGSL specification for the Dropout layer. Expected format:
-            `Do<rate>`
-            - `<dropout>`: Dropout percentage (0-100).
+            `D<rate>`
+            - `<rate>`: Dropout percentage (0-100).
 
         Returns
         -------
         tf.keras.layers.Dropout
@@ -1030,21 +995,20 @@ def dropout_generator(self,
 
         """
 
-        # Ensure the layer string format is as expected
-        if not layer:
-            raise ValueError(f"Layer {layer} is of unexpected format.")
+        # Validate layer format and extract dropout rate
+        match = re.match(r'D(-?\d+)$', layer)
+        if not match:
+            raise ValueError(
+                f"Layer {layer} is of unexpected format. Expected format: "
+                "D<rate> where rate is between 0 and 100.")
 
-        # Extract and validate the dropout rate
-        dropout_match = re.search(r'\d+$', layer)
-        if not dropout_match:
-            raise ValueError("Could not extract dropout rate from layer "
-                             f"{layer}.")
+        dropout_rate = int(match.group(1))
 
-        dropout = int(dropout_match.group())
-        if dropout < 0 or dropout > 100:
+        # Validate dropout rate
+        if dropout_rate < 0 or dropout_rate > 100:
             raise ValueError("Dropout rate must be in the range [0, 100].")
 
-        return layers.Dropout(dropout / 100)
+        return layers.Dropout(dropout_rate / 100)
 
     def get_output_layer(self,
                          layer: str,
@@ -1056,9 +1020,9 @@ def get_output_layer(self,
         ----------
         layer : str
             VGSL specification for the output layer. Expected format:
-            `O(2|1|0)(l|s|c)<n>`
+            `O(2|1|0)(l|s)<n>`
             - `(2|1|0)`: Dimensionality of the output.
-            - `(l|s|c)`: Non-linearity type.
+            - `(l|s)`: Non-linearity type.
             - `<n>`: Number of output classes.
         output_classes : int
             Number of output classes to overwrite the classes defined in the
@@ -1083,23 +1047,22 @@ def get_output_layer(self,
 
         """
 
-        # Check the layer format
-        if not re.match(r'O[210][lsc]\d+$', layer):
-            raise ValueError(f"Layer {layer} is of unexpected format.")
-
-        # Extract necessary components
-        dimensionality, linearity, classes_str = layer[1], layer[2], layer[3:]
+        # Validate layer format
+        match = re.match(r'O([210])([a-z])(\d+)$', layer)
+        if not match:
+            raise ValueError(
+                f"Layer {layer} is of unexpected format. Expected format: "
+                "O[210](l|s)<n>.")
 
-        try:
-            classes = int(classes_str)
-        except ValueError:
-            raise ValueError("Could not extract number of classes from layer "
-                             f"{layer}.")
+        dimensionality, linearity, classes = match.groups()
+        classes = int(classes)
 
+        # Handle potential mismatch in specified classes and provided
+        # output_classes
         if output_classes and classes != output_classes:
             logging.warning(
-                "Overwriting output classes from input string. "
-                "Was: %s, now: %s", classes, output_classes)
+                "Overwriting output classes from input string. Was: %s, now: "
+                "%s", classes, output_classes)
             classes = output_classes
             self.selected_model_vgsl_spec[-1] = (f"O{dimensionality}"
                                                  f"{linearity}{classes}")
@@ -1108,9 +1071,10 @@ def get_output_layer(self,
             return layers.Dense(classes,
                                 activation='softmax',
                                 kernel_initializer=self._initializer)
-        elif linearity == "c":
-            return CTCLayer(name='ctc_loss')
-
+        elif linearity == "l":
+            return layers.Dense(classes,
+                                activation='linear',
+                                kernel_initializer=self._initializer)
         else:
-            raise ValueError(f"Output layer linearity {linearity} is not "
-                             "supported.")
+            raise ValueError(
+                f"Output layer linearity {linearity} is not supported.")
diff --git a/tests/test_model_creation.py b/tests/test_model_creation.py
index ccaaa177..52ed6be2 100644
--- a/tests/test_model_creation.py
+++ b/tests/test_model_creation.py
@@ -3,7 +3,6 @@
 
 # > Third party dependencies
 import numpy as np
-import tensorflow as tf
 from tensorflow.keras import layers
 from tensorflow.keras import activations
 
@@ -112,6 +111,30 @@ def test_conv2d_layer(self):
         self.assertEqual(model.layers[4].activation, activations.linear)
         self.assertEqual(model.layers[5].activation, activations.softmax)
 
+        # Error handling tests
+        # Check that an error is raised when an invalid number of parameters
+        # is specified
+        vgsl_spec_string = "None,64,None,1 C3,32 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertIn("Conv layer C3,32 has too few parameters.",
+                      str(context.exception))
+
+        vgsl_spec_string = "None,64,None,1 C3,3,2,2,32,4 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertIn(
+            "Conv layer C3,3,2,2,32,4 has too many parameters.",
+            str(context.exception))
+
+        # Check that an error is raised when an invalid activation function is
+        # specified
+        vgsl_spec_string = "None,64,None,1 Cz3,3,32 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual("Invalid activation function specified in Cz3,3,32",
+                         str(context.exception))
+
     def test_maxpool_layer(self):
         vgsl_spec_string = "None,64,None,1 Mp2,2,2,2 O1s10"
         model_generator = self.VGSLModelGenerator(vgsl_spec_string)
@@ -139,6 +162,27 @@ def test_maxpool_layer(self):
         self.assertEqual(height, output_dimension)
         self.assertEqual(width, output_dimension)
 
+        # Error handling tests
+        # Check that an error is raised when an invalid number of parameters
+        # is specified
+        with self.assertRaises(ValueError) as context:
+            vgsl_spec_string = "None,64,None,1 Mp2,2,2 O1s10"
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+
+        self.assertEqual(str(context.exception),
+                         "MaxPooling layer Mp2,2,2 does not have the expected "
+                         "number of parameters. Expected format: Mp<x>,"
+                         "<y>,<s_x>,<s_y>")
+
+        # Check that an error is raised when an invalid value is specified
+        with self.assertRaises(ValueError) as context:
+            vgsl_spec_string = "None,64,None,1 Mp-2,2,2,2 O1s10"
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+
+        self.assertEqual(str(context.exception),
+                         "Invalid values for pooling or stride in Mp-2,2,2,2. "
+                         "All values should be positive integers.")
+
     def test_avgpool_layer(self):
         vgsl_spec_string = "None,64,None,1 Ap2,2,2,2 O1s10"
         model_generator = self.VGSLModelGenerator(vgsl_spec_string)
@@ -164,6 +208,27 @@ def test_avgpool_layer(self):
         self.assertEqual(height, output_dimension)
         self.assertEqual(width, output_dimension)
 
+        # Error handling tests
+        # Check that an error is raised when an invalid number of parameters
+        # is specified
+        with self.assertRaises(ValueError) as context:
+            vgsl_spec_string = "None,64,None,1 Ap2,2,2 O1s10"
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+
+        self.assertEqual(str(context.exception),
+                         "AvgPool layer Ap2,2,2 does not have the expected "
+                         "number of parameters. Expected format: Ap<x>,"
+                         "<y>,<s_x>,<s_y>")
+
+        # Check that an error is raised when an invalid value is specified
+        with self.assertRaises(ValueError) as context:
+            vgsl_spec_string = "None,64,None,1 Ap-2,2,2,2 O1s10"
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+
+        self.assertEqual(str(context.exception),
+                         "Invalid values for pooling or stride in Ap-2,2,2,2. "
+                         "All values should be positive integers.")
+
     def test_reshape_layer(self):
         vgsl_spec_string = "None,64,None,1 Rc O1s10"
         model_generator = self.VGSLModelGenerator(vgsl_spec_string)
@@ -182,6 +247,22 @@ def test_reshape_layer(self):
         actual_shape = reshape_output.shape
         self.assertEqual(actual_shape, expected_shape)
 
+        # Error handling tests
+        # Test unexpected format
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator("None,64,None,1 R O1s10")
+            model = model_generator.build()
+
+        self.assertEqual(str(context.exception),
+                         "Reshape layer R is of unexpected format. Expected "
+                         "format: Rc.")
+
+        # Test incorrectly specified reshape layer
+        with self.assertRaises(ValueError) as context:
+            self.VGSLModelGenerator("None,64,None,1 Rx O1s10")
+        self.assertEqual(str(context.exception),
+                         "Reshape layer Rx not specified correctly")
+
     def test_fully_connected_layer(self):
         vgsl_spec_string = "None,64,None,1 Fs128 O1s10"
         model_generator = self.VGSLModelGenerator(vgsl_spec_string)
@@ -203,6 +284,38 @@ def test_fully_connected_layer(self):
         self.assertEqual(model.layers[4].activation, activations.linear)
         self.assertEqual(model.layers[5].activation, activations.softmax)
 
+        # Error handling tests
+        # Test for malformed VGSL specification string for the dense layer
+        # No activation
+        vgsl_spec_string = "None,64,None,1 F128 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Dense layer F128 is of unexpected format. Expected "
+                         "format: F(s|t|r|l|m)<d>.")
+        # No neurons
+        vgsl_spec_string = "None,64,None,1 Fs O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Dense layer Fs is of unexpected format. Expected "
+                         "format: F(s|t|r|l|m)<d>.")
+
+        # Test for invalid activation function
+        vgsl_spec_string = "None,64,None,1 Fz128 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Invalid activation 'z' for Dense layer Fz128.")
+
+        # Test for invalid number of neurons (<= 0)
+        vgsl_spec_string = "None,64,None,1 Fs-100 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Invalid number of neurons -100 for Dense layer "
+                         "Fs-100.")
+
     def test_lstm_layer(self):
         vgsl_spec_string = "None,64,None,1 Rc Lfs128 O1s10"
         model_generator = self.VGSLModelGenerator(vgsl_spec_string)
@@ -222,6 +335,38 @@ def test_lstm_layer(self):
         self.assertEqual(model.layers[2].go_backwards, True)
         self.assertEqual(model.layers[2].return_sequences, False)
 
+        # Error handling tests
+        # Missing direction
+        vgsl_spec_string = "None,64,None,1 L128 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "LSTM layer L128 is of unexpected format. Expected "
+                         "format: L(f|r)[s]<n>.")
+
+        # Invalid direction
+        vgsl_spec_string = "None,64,None,1 Lx128 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "LSTM layer Lx128 is of unexpected format. Expected "
+                         "format: L(f|r)[s]<n>.")
+
+        # Missing number of units
+        vgsl_spec_string = "None,64,None,1 Lf O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "LSTM layer Lf is of unexpected format. Expected "
+                         "format: L(f|r)[s]<n>.")
+
+        # Invalid number of units (negative)
+        vgsl_spec_string = "None,64,None,1 Lf-128 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Invalid number of units -128 for LSTM layer Lf-128.")
+
     def test_gru_layer(self):
         vgsl_spec_string = "None,64,None,1 Rc Gfs128 O1s10"
         model_generator = self.VGSLModelGenerator(vgsl_spec_string)
@@ -241,21 +386,71 @@ def test_gru_layer(self):
         self.assertEqual(model.layers[2].go_backwards, True)
         self.assertEqual(model.layers[2].return_sequences, False)
 
+        # Error handling tests
+        # Missing direction
+        vgsl_spec_string = "None,64,None,1 G128 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "GRU layer G128 is of unexpected format. Expected "
+                         "format: G(f|r)[s]<n>.")
+
+        # Invalid direction
+        vgsl_spec_string = "None,64,None,1 Gx128 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "GRU layer Gx128 is of unexpected format. Expected "
+                         "format: G(f|r)[s]<n>.")
+
+        # Invalid number of units (negative)
+        vgsl_spec_string = "None,64,None,1 Gf-128 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Invalid number of units -128 for GRU layer Gf-128.")
+
     def test_bidirectional_layer(self):
-        vgsl_spec_string = "None,64,None,1 Rc Bgs128 O1s10"
+        vgsl_spec_string = "None,64,None,1 Rc Bg128 O1s10"
         model_generator = self.VGSLModelGenerator(vgsl_spec_string)
         model = model_generator.build()
         self.assertIsInstance(model.layers[2], layers.Bidirectional)
         self.assertIsInstance(model.layers[2].layer, layers.GRU)
         self.assertEqual(model.layers[2].layer.units, 128)
 
-        vgsl_spec_string = "None,64,None,1 Rc Bls128 O1s10"
+        vgsl_spec_string = "None,64,None,1 Rc Bl128 O1s10"
         model_generator = self.VGSLModelGenerator(vgsl_spec_string)
         model = model_generator.build()
         self.assertIsInstance(model.layers[2], layers.Bidirectional)
         self.assertIsInstance(model.layers[2].layer, layers.LSTM)
         self.assertEqual(model.layers[2].layer.units, 128)
 
+        # Error handling tests
+        # Invalid format
+        vgsl_spec_string = "None,64,None,1 Rc B128 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Layer B128 is of unexpected format. Expected "
+                         "format: B(g|l)<n> where 'g' stands for GRU, 'l' "
+                         "stands for LSTM, and 'n' is the number of units.")
+
+        # Invalid RNN layer type
+        vgsl_spec_string = "None,64,None,1 Rc Bx128 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Layer Bx128 is of unexpected format. Expected "
+                         "format: B(g|l)<n> where 'g' stands for GRU, 'l' "
+                         "stands for LSTM, and 'n' is the number of units.")
+
+        # Invalid number of units (negative)
+        vgsl_spec_string = "None,64,None,1 Rc Bg-128 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Invalid number of units -128 for layer Bg-128.")
+
     def test_residual_block(self):
         vgsl_spec_string = "None,64,None,1 RB3,3,16 O1s10"
         model_generator = self.VGSLModelGenerator(vgsl_spec_string)
@@ -282,6 +477,31 @@ def test_residual_block(self):
         # Check that conv1 also has strides of 2
         self.assertEqual(model.layers[1].conv1.strides, (2, 2))
 
+        # Error handling tests
+        # Invalid format
+        vgsl_spec_string = "None,64,None,1 RBd O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Layer RBd is of unexpected format. Expected format: "
+                         "RB[d]<x>,<y>,<d>.")
+
+        # Invalid parameters (negative values)
+        vgsl_spec_string = "None,64,None,1 RB-3,3,-16 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Invalid parameters x=-3, y=3, d=-16 in layer "
+                         "RB-3,3,-16. All values should be positive integers.")
+
+        # Missing parameters
+        vgsl_spec_string = "None,64,None,1 RB3,3 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Layer RB3,3 is of unexpected format. Expected "
+                         "format: RB[d]<x>,<y>,<d>.")
+
     def test_dropout_layer(self):
         vgsl_spec_string = "None,64,None,1 D50 O1s10"
         model_generator = self.VGSLModelGenerator(vgsl_spec_string)
@@ -289,6 +509,29 @@ def test_dropout_layer(self):
         self.assertIsInstance(model.layers[1], layers.Dropout)
         self.assertEqual(model.layers[1].rate, 0.5)
 
+        # Error handling tests
+        # Invalid format
+        vgsl_spec_string = "None,64,None,1 D O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Layer D is of unexpected format. Expected format: "
+                         "D<rate> where rate is between 0 and 100.")
+
+        # Invalid dropout rate (negative value)
+        vgsl_spec_string = "None,64,None,1 D-50 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Dropout rate must be in the range [0, 100].")
+
+        # Invalid dropout rate (value greater than 100)
+        vgsl_spec_string = "None,64,None,1 D101 O1s10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Dropout rate must be in the range [0, 100].")
+
     def test_output_layer(self):
         vgsl_spec_string = "None,64,None,1 Cr3,3,32 O1s10"
         model_generator = self.VGSLModelGenerator(vgsl_spec_string)
@@ -307,7 +550,20 @@ def test_output_layer(self):
         # Check that the output layer has the correct number of units
         self.assertEqual(model.layers[-2].units, 5)
 
-        # TODO: CTCLayer
+        # Error handling tests
+        # Invalid format
+        vgsl_spec_string = "None,64,None,1 OXs10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Layer OXs10 is of unexpected format. Expected format: O[210](l|s)<n>.")
+
+        # Invalid linearity
+        vgsl_spec_string = "None,64,None,1 O1x10"
+        with self.assertRaises(ValueError) as context:
+            model_generator = self.VGSLModelGenerator(vgsl_spec_string)
+        self.assertEqual(str(context.exception),
+                         "Output layer linearity x is not supported.")
 
 
 if __name__ == "__main__":
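
Usage note (not part of the patch): a minimal sketch of how the refactored generator is driven, for reviewers unfamiliar with VGSL spec strings. The spec string is taken from the tests above; the import path (assuming src/ is on the Python path) and the summary() call are illustrative assumptions, not code from this PR.

    # Build a Keras model from a VGSL spec: grayscale input of height 64 and
    # variable width, a 3x3 ReLU conv with 32 filters, 10-class softmax output.
    from vgsl_model_generator import VGSLModelGenerator  # assumed import path

    model_generator = VGSLModelGenerator("None,64,None,1 Cr3,3,32 O1s10")
    model = model_generator.build()
    model.summary()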