From c4b2a922487268ff1f8eec69d38fc69fcce3312b Mon Sep 17 00:00:00 2001 From: harshilraval Date: Tue, 6 May 2025 18:12:08 +0530 Subject: [PATCH 1/2] Serialisation deserialisation tests for python sdk --- requirements.txt | 3 +- tests/serdesertest/__init__.py | 0 .../authorization_request_serdeser_test.py | 64 + .../bulk_response_serdeser_test.py | 103 ++ .../conductor_application_serdeser_test.py | 46 + .../conductor_user_serdeser_test.py | 104 ++ ...lation_ids_search_request_serdeser_test.py | 58 + .../rerun_workflow_request_serdeser_test.py | 68 + tests/serdesertest/util/__init__.py | 0 .../util/ser_deser_json_string.json | 1547 +++++++++++++++++ .../util/serdeser_json_resolver_utility.py | 229 +++ 11 files changed, 2221 insertions(+), 1 deletion(-) create mode 100644 tests/serdesertest/__init__.py create mode 100644 tests/serdesertest/authorization_request_serdeser_test.py create mode 100644 tests/serdesertest/bulk_response_serdeser_test.py create mode 100644 tests/serdesertest/conductor_application_serdeser_test.py create mode 100644 tests/serdesertest/conductor_user_serdeser_test.py create mode 100644 tests/serdesertest/correlation_ids_search_request_serdeser_test.py create mode 100644 tests/serdesertest/rerun_workflow_request_serdeser_test.py create mode 100644 tests/serdesertest/util/__init__.py create mode 100644 tests/serdesertest/util/ser_deser_json_string.json create mode 100644 tests/serdesertest/util/serdeser_json_resolver_utility.py diff --git a/requirements.txt b/requirements.txt index 4f8fe193b..8f6b97443 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,4 +5,5 @@ requests >= 2.31.0 typing-extensions >= 4.2.0 astor >= 0.8.1 shortuuid >= 1.0.11 -dacite >= 1.8.1 \ No newline at end of file +dacite >= 1.8.1 +deprecated>=1.2.13 \ No newline at end of file diff --git a/tests/serdesertest/__init__.py b/tests/serdesertest/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/tests/serdesertest/authorization_request_serdeser_test.py b/tests/serdesertest/authorization_request_serdeser_test.py new file mode 100644 index 000000000..3eedb0481 --- /dev/null +++ b/tests/serdesertest/authorization_request_serdeser_test.py @@ -0,0 +1,64 @@ +import unittest +import json +from serdesertest.util.serdeser_json_resolver_utility import JsonTemplateResolver + +# Import the classes being tested +from conductor.client.http.models.authorization_request import AuthorizationRequest + + +class TestAuthorizationRequestSerDes(unittest.TestCase): + """ + Unit tests for serialization and deserialization of AuthorizationRequest model. + """ + + def setUp(self): + """Set up test fixtures.""" + # Load the template JSON for testing + self.server_json_str = JsonTemplateResolver.get_json_string("AuthorizationRequest") + self.server_json = json.loads(self.server_json_str) + + def test_serialization_deserialization(self): + """Test complete serialization and deserialization process.""" + # Create the authorization request object directly from JSON + # The model's __init__ should handle the nested objects + auth_request = AuthorizationRequest( + subject=self.server_json.get('subject'), + target=self.server_json.get('target'), + access=self.server_json.get('access') + ) + + # Verify model is properly initialized + self.assertIsNotNone(auth_request, "Deserialized object should not be null") + + # Verify access list + self.assertIsNotNone(auth_request.access, "Access list should not be null") + self.assertTrue(all(access in ["CREATE", "READ", "UPDATE", "DELETE", "EXECUTE"] + for access in auth_request.access)) + + # Verify subject and target are present + self.assertIsNotNone(auth_request.subject, "Subject should not be null") + self.assertIsNotNone(auth_request.target, "Target should not be null") + + # Serialize back to dictionary + result_dict = auth_request.to_dict() + + # Verify structure matches the original + self.assertEqual( + set(self.server_json.keys()), + 
set(result_dict.keys()), + "Serialized JSON should have the same keys as the original" + ) + + # Convert both to JSON strings and compare (similar to objectMapper.readTree) + original_json_normalized = json.dumps(self.server_json, sort_keys=True) + result_json_normalized = json.dumps(result_dict, sort_keys=True) + + self.assertEqual( + original_json_normalized, + result_json_normalized, + "Serialized JSON should match the original SERVER_JSON" + ) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/serdesertest/bulk_response_serdeser_test.py b/tests/serdesertest/bulk_response_serdeser_test.py new file mode 100644 index 000000000..2819e78ad --- /dev/null +++ b/tests/serdesertest/bulk_response_serdeser_test.py @@ -0,0 +1,103 @@ +import unittest +import json + +from conductor.client.http.models import BulkResponse +from serdesertest.util.serdeser_json_resolver_utility import JsonTemplateResolver + + +class TestBulkResponseSerDeser(unittest.TestCase): + """Test serialization and deserialization for BulkResponse class""" + + def setUp(self): + """Set up test fixtures""" + # Load test data from template + self.server_json_str = JsonTemplateResolver.get_json_string("BulkResponse") + # Parse into dictionary for comparisons + self.server_json_dict = json.loads(self.server_json_str) + + def test_bulk_response_serialization_deserialization(self): + """Comprehensive test for serialization and deserialization of BulkResponse""" + # 1. Deserialize JSON into model object + bulk_response = BulkResponse( + bulk_error_results=self.server_json_dict['bulkErrorResults'], + bulk_successful_results=self.server_json_dict['bulkSuccessfulResults'] + ) + + # 2. Verify BulkResponse object properties and types + self.assertIsInstance(bulk_response, BulkResponse) + self.assertIsInstance(bulk_response.bulk_error_results, dict) + self.assertIsInstance(bulk_response.bulk_successful_results, list) + + # 3. 
Validate content of properties + for key, value in bulk_response.bulk_error_results.items(): + self.assertIsInstance(key, str) + self.assertIsInstance(value, str) + + # Validate the structure of items in bulk_successful_results + # This adapts to the actual structure found in the template + for item in bulk_response.bulk_successful_results: + # If the items are dictionaries with 'value' keys + if isinstance(item, dict) and 'value' in item: + self.assertIsInstance(item['value'], str) + # If the items are strings + elif isinstance(item, str): + pass + else: + self.fail(f"Unexpected item type in bulk_successful_results: {type(item)}") + + # 4. Verify values match original source + self.assertEqual(bulk_response.bulk_error_results, self.server_json_dict['bulkErrorResults']) + self.assertEqual(bulk_response.bulk_successful_results, self.server_json_dict['bulkSuccessfulResults']) + + # 5. Test serialization back to dictionary + result_dict = bulk_response.to_dict() + self.assertIn('bulk_error_results', result_dict) + self.assertIn('bulk_successful_results', result_dict) + self.assertEqual(result_dict['bulk_error_results'], self.server_json_dict['bulkErrorResults']) + self.assertEqual(result_dict['bulk_successful_results'], self.server_json_dict['bulkSuccessfulResults']) + + # 6. Test serialization to JSON-compatible dictionary (with camelCase keys) + json_compatible_dict = { + 'bulkErrorResults': result_dict['bulk_error_results'], + 'bulkSuccessfulResults': result_dict['bulk_successful_results'] + } + + # 7. Normalize dictionaries for comparison (handles differences in ordering) + normalized_original = json.loads(json.dumps(self.server_json_dict, sort_keys=True)) + normalized_result = json.loads(json.dumps(json_compatible_dict, sort_keys=True)) + self.assertEqual(normalized_original, normalized_result) + + # 8. 
Test full serialization/deserialization round trip + bulk_response_2 = BulkResponse( + bulk_error_results=result_dict['bulk_error_results'], + bulk_successful_results=result_dict['bulk_successful_results'] + ) + self.assertEqual(bulk_response.bulk_error_results, bulk_response_2.bulk_error_results) + self.assertEqual(bulk_response.bulk_successful_results, bulk_response_2.bulk_successful_results) + + # 9. Test with missing fields + bulk_response_errors_only = BulkResponse( + bulk_error_results={"id1": "error1"} + ) + self.assertEqual(bulk_response_errors_only.bulk_error_results, {"id1": "error1"}) + self.assertIsNone(bulk_response_errors_only.bulk_successful_results) + + # Create a structure similar to what's in the template + sample_successful_result = [{"value": "success1"}] + bulk_response_success_only = BulkResponse( + bulk_successful_results=sample_successful_result + ) + self.assertIsNone(bulk_response_success_only.bulk_error_results) + self.assertEqual(bulk_response_success_only.bulk_successful_results, sample_successful_result) + + # 10. 
Test with empty fields + bulk_response_empty = BulkResponse( + bulk_error_results={}, + bulk_successful_results=[] + ) + self.assertEqual(bulk_response_empty.bulk_error_results, {}) + self.assertEqual(bulk_response_empty.bulk_successful_results, []) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/serdesertest/conductor_application_serdeser_test.py b/tests/serdesertest/conductor_application_serdeser_test.py new file mode 100644 index 000000000..428258286 --- /dev/null +++ b/tests/serdesertest/conductor_application_serdeser_test.py @@ -0,0 +1,46 @@ +import unittest +import json + +from conductor.client.http.models import ConductorApplication +from serdesertest.util.serdeser_json_resolver_utility import JsonTemplateResolver + + + +class TestConductorApplicationSerialization(unittest.TestCase): + """Test case for ConductorApplication serialization and deserialization.""" + + def setUp(self): + """Set up test fixtures before each test.""" + # Load JSON template from the resolver utility + self.server_json_str = JsonTemplateResolver.get_json_string("ConductorApplication") + self.server_json = json.loads(self.server_json_str) + + def test_serialization_deserialization(self): + """Test that validates the serialization and deserialization of ConductorApplication model.""" + + # Step 1: Deserialize server JSON into SDK model object + # Create model object using constructor with fields from the JSON + conductor_app = ConductorApplication( + id=self.server_json.get('id'), + name=self.server_json.get('name'), + created_by=self.server_json.get('createdBy') + ) + + # Step 2: Verify all fields are correctly populated + self.assertEqual(conductor_app.id, self.server_json.get('id')) + self.assertEqual(conductor_app.name, self.server_json.get('name')) + self.assertEqual(conductor_app.created_by, self.server_json.get('createdBy')) + + # Step 3: Serialize the model back to JSON + serialized_json = conductor_app.to_dict() + + # Step 4: 
Verify the serialized JSON matches the original + # Note: Field names in serialized_json will be in snake_case + self.assertEqual(serialized_json.get('id'), self.server_json.get('id')) + self.assertEqual(serialized_json.get('name'), self.server_json.get('name')) + # Handle the camelCase to snake_case transformation + self.assertEqual(serialized_json.get('created_by'), self.server_json.get('createdBy')) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/serdesertest/conductor_user_serdeser_test.py b/tests/serdesertest/conductor_user_serdeser_test.py new file mode 100644 index 000000000..b84642065 --- /dev/null +++ b/tests/serdesertest/conductor_user_serdeser_test.py @@ -0,0 +1,104 @@ +import json +import unittest + +from conductor.client.http.models import ConductorUser, Role, Group +from serdesertest.util.serdeser_json_resolver_utility import JsonTemplateResolver + + +class TestConductorUserSerDeSer(unittest.TestCase): + """Test serialization and deserialization of ConductorUser.""" + + def setUp(self): + # Load JSON template using the utility + self.server_json_str = JsonTemplateResolver.get_json_string("ConductorUser") + self.server_json = json.loads(self.server_json_str) + + def test_conductor_user_serde(self): + """Test that ConductorUser can be deserialized from server JSON and serialized back without data loss.""" + + # 1. 
Deserialize server JSON into ConductorUser object + conductor_user = ConductorUser() + conductor_user_dict = self.server_json + + # Set attributes from deserialized JSON + if 'id' in conductor_user_dict: + conductor_user.id = conductor_user_dict['id'] + if 'name' in conductor_user_dict: + conductor_user.name = conductor_user_dict['name'] + if 'roles' in conductor_user_dict: + # Assuming Role has a from_dict method or similar + roles_list = [] + for role_data in conductor_user_dict['roles']: + role = Role() # Create a Role object based on your actual implementation + # Set Role properties here + roles_list.append(role) + conductor_user.roles = roles_list + if 'groups' in conductor_user_dict: + # Assuming Group has a from_dict method or similar + groups_list = [] + for group_data in conductor_user_dict['groups']: + group = Group() # Create a Group object based on your actual implementation + # Set Group properties here + groups_list.append(group) + conductor_user.groups = groups_list + if 'uuid' in conductor_user_dict: + conductor_user.uuid = conductor_user_dict['uuid'] + if 'applicationUser' in conductor_user_dict: + conductor_user.application_user = conductor_user_dict['applicationUser'] + if 'encryptedId' in conductor_user_dict: + conductor_user.encrypted_id = conductor_user_dict['encryptedId'] + if 'encryptedIdDisplayValue' in conductor_user_dict: + conductor_user.encrypted_id_display_value = conductor_user_dict['encryptedIdDisplayValue'] + + # 2. 
Verify all fields are properly populated + expected_id = self.server_json.get('id', None) + self.assertEqual(conductor_user.id, expected_id) + + expected_name = self.server_json.get('name', None) + self.assertEqual(conductor_user.name, expected_name) + + # Verify lists + if 'roles' in self.server_json: + self.assertEqual(len(conductor_user.roles), len(self.server_json['roles'])) + + if 'groups' in self.server_json: + self.assertEqual(len(conductor_user.groups), len(self.server_json['groups'])) + + expected_uuid = self.server_json.get('uuid', None) + self.assertEqual(conductor_user.uuid, expected_uuid) + + expected_app_user = self.server_json.get('applicationUser', None) + self.assertEqual(conductor_user.application_user, expected_app_user) + + expected_encrypted_id = self.server_json.get('encryptedId', None) + self.assertEqual(conductor_user.encrypted_id, expected_encrypted_id) + + expected_encrypted_id_display = self.server_json.get('encryptedIdDisplayValue', None) + self.assertEqual(conductor_user.encrypted_id_display_value, expected_encrypted_id_display) + + # 3. Serialize the object back to JSON + serialized_json = conductor_user.to_dict() + + # 4. 
Verify the serialized JSON matches the original + # Handle camelCase to snake_case transformations + if 'applicationUser' in self.server_json: + self.assertEqual(serialized_json['application_user'], self.server_json['applicationUser']) + if 'encryptedId' in self.server_json: + self.assertEqual(serialized_json['encrypted_id'], self.server_json['encryptedId']) + if 'encryptedIdDisplayValue' in self.server_json: + self.assertEqual(serialized_json['encrypted_id_display_value'], self.server_json['encryptedIdDisplayValue']) + + # Check common fields that don't need transformation + for field in ['id', 'name', 'uuid']: + if field in self.server_json: + self.assertEqual(serialized_json[field], self.server_json[field]) + + # Check lists length + if 'roles' in self.server_json: + self.assertEqual(len(serialized_json['roles']), len(self.server_json['roles'])) + if 'groups' in self.server_json: + self.assertEqual(len(serialized_json['groups']), len(self.server_json['groups'])) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/serdesertest/correlation_ids_search_request_serdeser_test.py b/tests/serdesertest/correlation_ids_search_request_serdeser_test.py new file mode 100644 index 000000000..39bc6d58a --- /dev/null +++ b/tests/serdesertest/correlation_ids_search_request_serdeser_test.py @@ -0,0 +1,58 @@ +import unittest +import json + +from conductor.client.http.models.correlation_ids_search_request import CorrelationIdsSearchRequest +from serdesertest.util.serdeser_json_resolver_utility import JsonTemplateResolver + +class TestCorrelationIdsSearchRequest(unittest.TestCase): + """Test case for CorrelationIdsSearchRequest class.""" + + def setUp(self): + """Set up test fixtures.""" + # Load the JSON template for CorrelationIdsSearchRequest + self.server_json_str = JsonTemplateResolver.get_json_string("CorrelationIdsSearchRequest") + self.server_json = json.loads(self.server_json_str) + + # Convert camelCase to snake_case for 
initialization + self.python_format_json = {} + for key, value in self.server_json.items(): + # Use the attribute_map to find the Python property name + python_key = next((k for k, v in CorrelationIdsSearchRequest.attribute_map.items() if v == key), key) + self.python_format_json[python_key] = value + + def test_serdeser_correlation_ids_search_request(self): + """Test serialization and deserialization of CorrelationIdsSearchRequest.""" + # 1. Server JSON can be correctly deserialized into SDK model object + model_obj = CorrelationIdsSearchRequest(**self.python_format_json) + + # 2. All fields are properly populated during deserialization + # Check correlation_ids (list[str]) + self.assertIsNotNone(model_obj.correlation_ids) + self.assertIsInstance(model_obj.correlation_ids, list) + for item in model_obj.correlation_ids: + self.assertIsInstance(item, str) + + # Check workflow_names (list[str]) + self.assertIsNotNone(model_obj.workflow_names) + self.assertIsInstance(model_obj.workflow_names, list) + for item in model_obj.workflow_names: + self.assertIsInstance(item, str) + + # 3. The SDK model can be serialized back to JSON + serialized_dict = model_obj.to_dict() + + # 4. 
The resulting JSON matches the original + # Convert serialized dict keys to camelCase for comparison + json_dict = {} + for attr, value in serialized_dict.items(): + if attr in model_obj.attribute_map: + json_dict[model_obj.attribute_map[attr]] = value + else: + json_dict[attr] = value + + # Compare with original JSON + self.assertEqual(self.server_json, json_dict) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/serdesertest/rerun_workflow_request_serdeser_test.py b/tests/serdesertest/rerun_workflow_request_serdeser_test.py new file mode 100644 index 000000000..5625fa4c4 --- /dev/null +++ b/tests/serdesertest/rerun_workflow_request_serdeser_test.py @@ -0,0 +1,68 @@ +import unittest +import json +from copy import deepcopy + +from conductor.client.http.models import RerunWorkflowRequest +from serdesertest.util.serdeser_json_resolver_utility import JsonTemplateResolver + + +class TestRerunWorkflowRequestSerialization(unittest.TestCase): + """Test serialization and deserialization of RerunWorkflowRequest.""" + + def setUp(self): + """Set up test data.""" + # Get the JSON template for RerunWorkflowRequest + self.server_json_str = JsonTemplateResolver.get_json_string("RerunWorkflowRequest") + self.request_json = json.loads(self.server_json_str) + + # Create the SDK object for reuse in multiple tests + self.request_obj = RerunWorkflowRequest( + re_run_from_workflow_id=self.request_json["reRunFromWorkflowId"], + workflow_input=self.request_json["workflowInput"], + re_run_from_task_id=self.request_json["reRunFromTaskId"], + task_input=self.request_json["taskInput"], + correlation_id=self.request_json["correlationId"] + ) + + # Transform SDK object dict to match server format (for reuse in tests) + result_dict = self.request_obj.to_dict() + self.transformed_dict = { + "reRunFromWorkflowId": result_dict["re_run_from_workflow_id"], + "workflowInput": result_dict["workflow_input"], + "reRunFromTaskId": 
result_dict["re_run_from_task_id"], + "taskInput": result_dict["task_input"], + "correlationId": result_dict["correlation_id"] + } + + def test_serialization_deserialization_cycle(self): + """Test the complete serialization/deserialization cycle.""" + # 1. Test deserialization: Assert that fields are correctly populated + self.assertEqual(self.request_obj.re_run_from_workflow_id, "sample_reRunFromWorkflowId") + self.assertEqual(self.request_obj.re_run_from_task_id, "sample_reRunFromTaskId") + self.assertEqual(self.request_obj.correlation_id, "sample_correlationId") + + # Check dictionary fields (maps) + self.assertIsInstance(self.request_obj.workflow_input, dict) + self.assertEqual(self.request_obj.workflow_input["sample_key"], "sample_value") + + self.assertIsInstance(self.request_obj.task_input, dict) + self.assertEqual(self.request_obj.task_input["sample_key"], "sample_value") + + # 2. Test serialization: Compare individual fields + self.assertEqual(self.transformed_dict["reRunFromWorkflowId"], self.request_json["reRunFromWorkflowId"]) + self.assertEqual(self.transformed_dict["reRunFromTaskId"], self.request_json["reRunFromTaskId"]) + self.assertEqual(self.transformed_dict["correlationId"], self.request_json["correlationId"]) + + # Compare dictionary fields + self.assertEqual(self.transformed_dict["workflowInput"], self.request_json["workflowInput"]) + self.assertEqual(self.transformed_dict["taskInput"], self.request_json["taskInput"]) + + # 3. Ensure no fields are missing + self.assertEqual(set(self.transformed_dict.keys()), set(self.request_json.keys())) + + # 4. 
Test full cycle with deep equality + self.assertEqual(self.transformed_dict, self.request_json) + + +if __name__ == "__main__": + unittest.main() \ No newline at end of file diff --git a/tests/serdesertest/util/__init__.py b/tests/serdesertest/util/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/serdesertest/util/ser_deser_json_string.json b/tests/serdesertest/util/ser_deser_json_string.json new file mode 100644 index 000000000..15785f33c --- /dev/null +++ b/tests/serdesertest/util/ser_deser_json_string.json @@ -0,0 +1,1547 @@ +{ + "templates" : { + "IndexedDoc" : { + "content" : { + "score" : 123.456, + "metadata" : { + "sample_key" : "sample_value" + }, + "docId" : "sample_docId", + "text" : "sample_text", + "parentDocId" : "sample_parentDocId" + }, + "dependencies" : [ ] + }, + "WorkflowTestRequest.TaskMock" : { + "content" : { + "output" : { + "sample_key" : "sample_value" + }, + "executionTime" : 123, + "queueWaitTime" : 123, + "status" : "${TaskResult.Status}" + }, + "dependencies" : [ ] + }, + "Access" : { + "content" : "CREATE", + "enums" : { + "values" : [ "CREATE", "READ", "EXECUTE", "UPDATE", "DELETE" ], + "constants" : { + "READ" : "(100)", + "EXECUTE" : "(200)", + "DELETE" : "(400)", + "CREATE" : "(0)", + "UPDATE" : "(300)" + }, + "sampleValue" : "CREATE" + }, + "dependencies" : [ ] + }, + "GenerateTokenRequest" : { + "content" : { + "keyId" : "sample_keyId", + "keySecret" : "sample_keySecret" + }, + "dependencies" : [ ] + }, + "StateChangeEvent" : { + "content" : { + "payload" : { + "key" : "sample_value" + }, + "type" : "sample_type" + }, + "dependencies" : [ ] + }, + "WorkflowTask.WorkflowTaskList" : { + "content" : { + "tasks" : [ "${WorkflowTask}" ] + }, + "dependencies" : [ "WorkflowTask" ] + }, + "SubWorkflowParams" : { + "content" : { + "workflowDefinition" : "${WorkflowDef}", + "idempotencyKey" : "sample_idempotencyKey", + "name" : "sample_name", + "taskToDomain" : { + "sample_key" : "sample_value" + }, + 
"priority" : "sample_object_priority", + "version" : 123, + "idempotencyStrategy" : "${IdempotencyStrategy}" + }, + "dependencies" : [ "IdempotencyStrategy" ] + }, + "SubWorkflowParams1" : { + "content" : { + "workflowDefinition" : null, + "idempotencyKey" : "sample_idempotencyKey", + "name" : "sample_name", + "taskToDomain" : { + "sample_key" : "sample_value" + }, + "priority" : "sample_object_priority", + "version" : 123, + "idempotencyStrategy" : "${IdempotencyStrategy}" + }, + "dependencies" : [ "IdempotencyStrategy" ] + }, + "CorrelationIdsSearchRequest" : { + "content" : { + "workflowNames" : [ "sample_workflowNames" ], + "correlationIds" : [ "sample_correlationIds" ] + }, + "dependencies" : [ ] + }, + "SubjectType" : { + "content" : "USER", + "enums" : { + "values" : [ "USER", "ROLE", "GROUP" ], + "constants" : { + "ROLE" : "(1)", + "GROUP" : "(2)", + "USER" : "(0)" + }, + "sampleValue" : "USER" + }, + "dependencies" : [ ] + }, + "SchemaDef.Type" : { + "content" : "JSON", + "enums" : { + "values" : [ "JSON", "AVRO", "PROTOBUF" ], + "constants" : { + "JSON" : "(0)", + "PROTOBUF" : "(2)", + "AVRO" : "(1)" + }, + "sampleValue" : "JSON" + }, + "dependencies" : [ ] + }, + "TaskType" : { + "content" : { + "values" : [ "SIMPLE", "DYNAMIC", "FORK_JOIN", "FORK_JOIN_DYNAMIC", "DECISION", "SWITCH", "JOIN", "DO_WHILE", "SUB_WORKFLOW", "START_WORKFLOW", "EVENT", "WAIT", "HUMAN", "USER_DEFINED", "HTTP", "LAMBDA", "INLINE", "EXCLUSIVE_JOIN", "TERMINATE", "KAFKA_PUBLISH", "JSON_JQ_TRANSFORM", "SET_VARIABLE", "NOOP" ], + "constants" : { + "SUB_WORKFLOW" : "(8)", + "DECISION" : "(4)", + "NOOP" : "(22)", + "JOIN" : "(6)", + "KAFKA_PUBLISH" : "(19)", + "HTTP" : "(14)", + "FORK_JOIN" : "(2)", + "EVENT" : "(10)", + "DYNAMIC" : "(1)", + "LAMBDA" : "(15)", + "JSON_JQ_TRANSFORM" : "(20)", + "HUMAN" : "(12)", + "TERMINATE" : "(18)", + "FORK_JOIN_DYNAMIC" : "(3)", + "SIMPLE" : "(0)", + "DO_WHILE" : "(7)", + "INLINE" : "(16)", + "SWITCH" : "(5)", + "EXCLUSIVE_JOIN" : "(17)", + 
"SET_VARIABLE" : "(21)", + "WAIT" : "(11)", + "START_WORKFLOW" : "(9)", + "USER_DEFINED" : "(13)" + }, + "sampleValue" : "SIMPLE" + }, + "dependencies" : [ ] + }, + "Permission" : { + "content" : { + "name" : "sample_name" + }, + "dependencies" : [ ] + }, + "SaveScheduleRequest" : { + "content" : { + "cronExpression" : "sample_cronExpression", + "paused" : true, + "updatedBy" : "sample_updatedBy", + "startWorkflowRequest" : "${StartWorkflowRequest}", + "createdBy" : "sample_createdBy", + "name" : "sample_name", + "description" : "sample_description", + "zoneId" : "sample_zoneId", + "runCatchupScheduleInstances" : true, + "scheduleStartTime" : 123, + "scheduleEndTime" : 123 + }, + "dependencies" : [ "StartWorkflowRequest" ] + }, + "GrantedAccess" : { + "content" : { + "access" : [ "${Access}" ], + "tag" : "sample_tag", + "target" : "${TargetRef}" + }, + "dependencies" : [ "TargetRef", "Access" ] + }, + "IntegrationDefFormField.IntegrationDefFormFieldType" : { + "content" : "DROPDOWN", + "enums" : { + "values" : [ "DROPDOWN", "TEXT", "PASSWORD", "FILE" ], + "constants" : { + "DROPDOWN" : "(0)", + "PASSWORD" : "(2)", + "TEXT" : "(1)", + "FILE" : "(3)" + }, + "sampleValue" : "DROPDOWN" + }, + "dependencies" : [ ] + }, + "WorkflowDef" : { + "content" : { + "workflowStatusListenerSink" : "sample_workflowStatusListenerSink", + "variables" : { + "sample_key" : "sample_value" + }, + "schemaVersion" : 123, + "timeoutPolicy" : "${WorkflowDef.TimeoutPolicy}", + "outputSchema" : "${SchemaDef}", + "enforceSchema" : true, + "restartable" : true, + "inputSchema" : "${SchemaDef}", + "description" : "sample_description", + "version" : 123, + "inputParameters" : [ "sample_inputParameters" ], + "inputTemplate" : { + "sample_key" : "sample_value" + }, + "workflowStatusListenerEnabled" : true, + "ownerEmail" : "sample_ownerEmail", + "rateLimitConfig" : "${RateLimitConfig}", + "name" : "sample_name", + "timeoutSeconds" : 123, + "failureWorkflow" : "sample_failureWorkflow", + "tasks" : [ 
"${WorkflowTask}" ], + "outputParameters" : { + "sample_key" : "sample_value" + }, + "metadata" : { + "sample_key" : "sample_value" + } + }, + "dependencies" : [ "WorkflowTask", "RateLimitConfig", "SchemaDef" ], + "inherits" : ["Auditable"] + }, + "WorkflowDef1" : { + "content" : { + "workflowStatusListenerSink" : "sample_workflowStatusListenerSink", + "variables" : { + "sample_key" : "sample_value" + }, + "schemaVersion" : 123, + "timeoutPolicy" : "${WorkflowDef.TimeoutPolicy}", + "outputSchema" : "${SchemaDef}", + "enforceSchema" : true, + "restartable" : true, + "inputSchema" : "${SchemaDef}", + "description" : "sample_description", + "version" : 123, + "inputParameters" : [ "sample_inputParameters" ], + "inputTemplate" : { + "sample_key" : "sample_value" + }, + "workflowStatusListenerEnabled" : true, + "ownerEmail" : "sample_ownerEmail", + "rateLimitConfig" : "${RateLimitConfig}", + "name" : "sample_name", + "timeoutSeconds" : 123, + "failureWorkflow" : "sample_failureWorkflow", + "tasks" : [ "${WorkflowTask}" ], + "outputParameters" : { + "sample_key" : "sample_value" + } + }, + "dependencies" : [ "WorkflowTask", "RateLimitConfig", "SchemaDef" ], + "inherits" : ["Auditable"] + }, + "Workflow" : { + "content" : { + "variables" : { + "sample_key" : "sample_value" + }, + "workflowDefinition" : "${WorkflowDef}", + "externalInputPayloadStoragePath" : "sample_externalInputPayloadStoragePath", + "lastRetriedTime" : 123, + "parentWorkflowTaskId" : "sample_parentWorkflowTaskId", + "taskToDomain" : { + "sample_key" : "sample_value" + }, + "history" : [ "${Workflow1}" ], + "priority" : 1, + "failedReferenceTaskNames" : [ "sample_value" ], + "output" : { + "sample_key" : "sample_value" + }, + "input" : { + "sample_key" : "sample_value" + }, + "parentWorkflowId" : "sample_parentWorkflowId", + "failedTaskNames" : [ "sample_value" ], + "reasonForIncompletion" : "sample_reasonForIncompletion", + "reRunFromWorkflowId" : "sample_reRunFromWorkflowId", + "correlationId" : 
"sample_correlationId", + "endTime" : 123, + "event" : "sample_event", + "workflowId" : "sample_workflowId", + "tasks" : [ "${Task}" ], + "status" : "${Workflow.WorkflowStatus}", + "externalOutputPayloadStoragePath" : "sample_externalOutputPayloadStoragePath", + "idempotencyKey" : "sample_idempotencyKey", + "rateLimitKey" : "sample_rateLimitKey", + "rateLimited" : true + }, + "dependencies" : [ "Workflow.WorkflowStatus", "Task", "WorkflowDef" ], + "inherits" : ["Auditable"] + }, + "Workflow1" : { + "content" : { + "variables" : { + "sample_key" : "sample_value" + }, + "workflowDefinition" : "${WorkflowDef}", + "externalInputPayloadStoragePath" : "sample_externalInputPayloadStoragePath", + "lastRetriedTime" : 123, + "parentWorkflowTaskId" : "sample_parentWorkflowTaskId", + "taskToDomain" : { + "sample_key" : "sample_value" + }, + "history" : [ ], + "priority" : 1, + "failedReferenceTaskNames" : [ "sample_value" ], + "output" : { + "sample_key" : "sample_value" + }, + "input" : { + "sample_key" : "sample_value" + }, + "parentWorkflowId" : "sample_parentWorkflowId", + "failedTaskNames" : [ "sample_value" ], + "reasonForIncompletion" : "sample_reasonForIncompletion", + "reRunFromWorkflowId" : "sample_reRunFromWorkflowId", + "correlationId" : "sample_correlationId", + "endTime" : 123, + "event" : "sample_event", + "workflowId" : "sample_workflowId", + "tasks" : [ ], + "status" : "${Workflow.WorkflowStatus}", + "externalOutputPayloadStoragePath" : "sample_externalOutputPayloadStoragePath", + "idempotencyKey" : "sample_idempotencyKey", + "rateLimitKey" : "sample_rateLimitKey", + "rateLimited" : true + }, + "dependencies" : [ "Workflow.WorkflowStatus", "Task", "WorkflowDef" ], + "inherits" : ["Auditable"] + }, + "IntegrationDefFormField" : { + "content" : { + "fieldName" : "${ConfigKey}", + "dependsOn" : [ "${IntegrationDefFormField1}" ], + "defaultValue" : "sample_defaultValue", + "description" : "sample_description", + "optional" : true, + "label" : "sample_label", + 
"fieldType" : "${IntegrationDefFormField.IntegrationDefFormFieldType}", + "value" : "sample_value", + "valueOptions" : [ "${IntegrationDefFormField.Option}" ] + }, + "dependencies" : [ "IntegrationDefFormField.IntegrationDefFormFieldType", "IntegrationDefFormField.Option", "ConfigKey" ] + }, + "IntegrationDefFormField1" : { + "content" : { + "fieldName" : "${ConfigKey}", + "dependsOn" : [ ], + "defaultValue" : "sample_defaultValue", + "description" : "sample_description", + "optional" : true, + "label" : "sample_label", + "fieldType" : "${IntegrationDefFormField.IntegrationDefFormFieldType}", + "value" : "sample_value", + "valueOptions" : [ "${IntegrationDefFormField.Option}" ] + }, + "dependencies" : [ "IntegrationDefFormField.IntegrationDefFormFieldType", "IntegrationDefFormField.Option", "ConfigKey" ] + }, + "Task.Status" : { + "content" : "IN_PROGRESS", + "enums" : { + "values" : [ "IN_PROGRESS", "CANCELED", "FAILED", "FAILED_WITH_TERMINAL_ERROR", "COMPLETED", "COMPLETED_WITH_ERRORS", "SCHEDULED", "TIMED_OUT", "SKIPPED" ], + "constants" : { + "IN_PROGRESS" : "(false, true, true)", + "FAILED_WITH_TERMINAL_ERROR" : "(true, false, false)", + "COMPLETED" : "(true, true, true)", + "FAILED" : "(true, false, true)", + "TIMED_OUT" : "(true, false, true)", + "CANCELED" : "(true, false, false)", + "COMPLETED_WITH_ERRORS" : "(true, true, true)", + "SKIPPED" : "(true, true, false)", + "SCHEDULED" : "(false, true, true)" + }, + "sampleValue" : "IN_PROGRESS" + }, + "dependencies" : [ ] + }, + "LLMWorkerInput" : { + "content" : { + "stopWords" : [ "sample_stopWords" ], + "llmProvider" : "sample_llmProvider", + "maxResults" : 123, + "temperature" : 123.456, + "maxTokens" : 123, + "embeddingModelProvider" : "sample_embeddingModelProvider", + "model" : "sample_model", + "embeddingModel" : "sample_embeddingModel", + "prompt" : "sample_prompt", + "topP" : 123.456 + }, + "dependencies" : [ ] + }, + "TaskResult" : { + "content" : { + "outputData" : { + "sample_key" : "sample_value" 
+ }, + "extendLease" : true, + "callbackAfterSeconds" : 123, + "workerId" : "sample_workerId", + "subWorkflowId" : "sample_subWorkflowId", + "reasonForIncompletion" : "sample_reasonForIncompletion", + "workflowInstanceId" : "sample_workflowInstanceId", + "logs" : [ "${TaskExecLog}" ], + "taskId" : "sample_taskId", + "status" : "${TaskResult.Status}", + "externalOutputPayloadStoragePath" : "sample_externalOutputPayloadStoragePath" + }, + "dependencies" : [ "TaskResult.Status", "TaskExecLog" ] + }, + "EventHandler.TaskDetails" : { + "content" : { + "output" : { + "key" : "sample_value" + }, + "taskRefName" : "sample_taskRefName", + "workflowId" : "sample_workflowId", + "taskId" : "sample_taskId" + }, + "dependencies" : [ ] + }, + "Group" : { + "content" : { + "roles" : [ "${Role}" ], + "defaultAccess" : { + "${ResourceType}" : [ "${Access}" ] + }, + "description" : "sample_description", + "id" : "sample_id" + }, + "dependencies" : [ "Role", "ResourceType", "Access" ] + }, + "SubjectRef" : { + "content" : { + "id" : "sample_id", + "type" : "USER" + }, + "dependencies" : [ ] + }, + "TaskResult.Status" : { + "content" : "IN_PROGRESS", + "enums" : { + "values" : [ "IN_PROGRESS", "FAILED", "FAILED_WITH_TERMINAL_ERROR", "COMPLETED" ], + "constants" : { + "IN_PROGRESS" : "(0)", + "FAILED_WITH_TERMINAL_ERROR" : "(2)", + "COMPLETED" : "(3)", + "FAILED" : "(1)" + }, + "sampleValue" : "IN_PROGRESS" + }, + "dependencies" : [ ] + }, + "WorkflowTestRequest" : { + "content" : { + "subWorkflowTestRequest" : { + "sample_key" : "${WorkflowTestRequest1}" + }, + "taskRefToMockOutput" : { + "sample_key" : [ "${WorkflowTestRequest.TaskMock}" ] + } + }, + "dependencies" : [ "WorkflowTestRequest.TaskMock" ], + "inherits" : ["StartWorkflowRequest"] + }, + "WorkflowTestRequest1" : { + "content" : { + "subWorkflowTestRequest" : { }, + "taskRefToMockOutput" : { + "sample_key" : [ "${WorkflowTestRequest.TaskMock}" ] + } + }, + "dependencies" : [ "WorkflowTestRequest.TaskMock" ], + "inherits" : 
["StartWorkflowRequest"] + }, + "EventHandler" : { + "content" : { + "condition" : "sample_condition", + "evaluatorType" : "sample_evaluatorType", + "name" : "sample_name", + "active" : true, + "event" : "sample_event", + "actions" : [ "${EventHandler.Action}" ] + }, + "dependencies" : [ "EventHandler.Action" ] + }, + "DynamicForkJoinTaskList" : { + "content" : { + "dynamicTasks" : [ "${DynamicForkJoinTask}" ] + }, + "dependencies" : [ "DynamicForkJoinTask" ] + }, + "CreateOrUpdateApplicationRequest" : { + "content" : { + "name" : "sample_name" + }, + "dependencies" : [ ] + }, + "WorkflowDef.TimeoutPolicy" : { + "content" : "TIME_OUT_WF", + "enums" : { + "values" : [ "TIME_OUT_WF", "ALERT_ONLY" ], + "constants" : { + "ALERT_ONLY" : "(1)", + "TIME_OUT_WF" : "(0)" + }, + "sampleValue" : "TIME_OUT_WF" + }, + "dependencies" : [ ] + }, + "Auditable" : { + "content" : { + "updatedBy" : "sample_updatedBy", + "createTime" : 123, + "createdBy" : "sample_createdBy", + "updateTime" : 123, + "ownerApp" : "sample_ownerApp" + }, + "dependencies" : [ ] + }, + "CreateAccessKeyResponse" : { + "content" : { + "id" : "sample_id", + "secret" : "sample_secret" + }, + "dependencies" : [ ] + }, + "Workflow.WorkflowStatus" : { + "content" : "RUNNING", + "enums" : { + "values" : [ "RUNNING", "COMPLETED", "FAILED", "TIMED_OUT", "TERMINATED", "PAUSED" ], + "constants" : { + "PAUSED" : "(false, true)", + "COMPLETED" : "(true, true)", + "FAILED" : "(true, false)", + "RUNNING" : "(false, false)", + "TIMED_OUT" : "(true, false)", + "TERMINATED" : "(true, false)" + }, + "sampleValue" : "RUNNING" + }, + "dependencies" : [ ] + }, + "SearchResult" : { + "content" : { + "totalHits" : 123, + "results" : [ "${T}" ] + }, + "dependencies" : [ "T" ] + }, + "ResourceType" : { + "content" : "WORKFLOW_DEF", + "enums" : { + "values" : [ "WORKFLOW", "WORKFLOW_DEF", "WORKFLOW_SCHEDULE", "EVENT_HANDLER", "TASK_DEF", "TASK_REF_NAME", "TASK_ID", "APPLICATION", "USER", "SECRET_NAME", "ENV_VARIABLE", "TAG", 
"DOMAIN", "INTEGRATION_PROVIDER", "INTEGRATION", "PROMPT", "USER_FORM_TEMPLATE", "SCHEMA", "CLUSTER_CONFIG", "WEBHOOK" ], + "constants" : { + "DOMAIN" : "(12)", + "SECRET_NAME" : "(9)", + "ENV_VARIABLE" : "(10)", + "CLUSTER_CONFIG" : "(18)", + "APPLICATION" : "(7)", + "PROMPT" : "(15)", + "TASK_DEF" : "(4)", + "USER" : "(8)", + "USER_FORM_TEMPLATE" : "(16)", + "INTEGRATION_PROVIDER" : "(13)", + "WORKFLOW_DEF" : "(1)", + "INTEGRATION" : "(14)", + "SCHEMA" : "(17)", + "WEBHOOK" : "(19)", + "EVENT_HANDLER" : "(3)", + "TASK_REF_NAME" : "(5)", + "TAG" : "(11)", + "WORKFLOW_SCHEDULE" : "(2)", + "WORKFLOW" : "(0)", + "TASK_ID" : "(6)" + }, + "sampleValue" : "WORKFLOW" + }, + "dependencies" : [ ] + }, + "GrantedAccessResponse" : { + "content" : { + "grantedAccess" : [ "${GrantedAccess}" ] + }, + "dependencies" : [ "GrantedAccess" ] + }, + "TaskDef.TimeoutPolicy" : { + "content" : "RETRY", + "enums" : { + "values" : [ "RETRY", "TIME_OUT_WF", "ALERT_ONLY" ], + "constants" : { + "ALERT_ONLY" : "(2)", + "TIME_OUT_WF" : "(1)", + "RETRY" : "(0)" + }, + "sampleValue" : "RETRY" + }, + "dependencies" : [ ] + }, + "WorkflowStatus" : { + "content" : { + "output" : { + "key" : "sample_value" + }, + "variables" : { + "key" : "sample_value" + }, + "correlationId" : "sample_correlationId", + "workflowId" : "sample_workflowId", + "status" : "${Workflow.WorkflowStatus}" + }, + "dependencies" : [ "Workflow.WorkflowStatus" ] + }, + "IntegrationDefFormField.Option" : { + "content" : { + "label" : "sample_label", + "value" : "sample_value" + }, + "dependencies" : [ ] + }, + "TaskSummary" : { + "content" : { + "scheduledTime" : "sample_scheduledTime", + "externalInputPayloadStoragePath" : "sample_externalInputPayloadStoragePath", + "workflowPriority" : 123, + "updateTime" : "sample_updateTime", + "executionTime" : 123, + "output" : "sample_output", + "input" : "sample_input", + "taskType" : "sample_taskType", + "reasonForIncompletion" : "sample_reasonForIncompletion", + "domain" : 
"sample_domain", + "queueWaitTime" : 123, + "taskDefName" : "sample_taskDefName", + "workflowType" : "sample_workflowType", + "correlationId" : "sample_correlationId", + "startTime" : "sample_startTime", + "endTime" : "sample_endTime", + "workflowId" : "sample_workflowId", + "taskId" : "sample_taskId", + "status" : "${Task.Status}", + "externalOutputPayloadStoragePath" : "sample_externalOutputPayloadStoragePath" + }, + "dependencies" : [ "Task.Status" ] + }, + "IdempotencyStrategy" : { + "content" : "FAIL", + "enums" : { + "values" : [ "FAIL", "RETURN_EXISTING", "FAIL_ON_RUNNING" ], + "constants" : { + "FAIL_ON_RUNNING" : "(2)", + "RETURN_EXISTING" : "(1)", + "FAIL" : "(0)" + }, + "sampleValue" : "FAIL" + }, + "dependencies" : [ ] + }, + "Tag" : { + "content" : { + "value" : "sample_value", + "key" : "sample_key" + }, + "dependencies" : [ ] + }, + "ExternalStorageLocation" : { + "content" : { + "path" : "sample_path", + "uri" : "sample_uri" + }, + "dependencies" : [ ] + }, + "WorkflowSummary" : { + "content" : { + "externalInputPayloadStoragePath" : "sample_externalInputPayloadStoragePath", + "updateTime" : "sample_updateTime", + "priority" : 123, + "version" : 123, + "failedReferenceTaskNames" : "sample_failedReferenceTaskNames", + "output" : "sample_output", + "executionTime" : 123, + "input" : "sample_input", + "failedTaskNames" : [ "sample_failedTaskNames" ], + "createdBy" : "sample_createdBy", + "reasonForIncompletion" : "sample_reasonForIncompletion", + "workflowType" : "sample_workflowType", + "correlationId" : "sample_correlationId", + "startTime" : "sample_startTime", + "endTime" : "sample_endTime", + "event" : "sample_event", + "workflowId" : "sample_workflowId", + "status" : "${Workflow.WorkflowStatus}", + "externalOutputPayloadStoragePath" : "sample_externalOutputPayloadStoragePath" + }, + "dependencies" : [ "Workflow.WorkflowStatus" ] + }, + "Task" : { + "content" : { + "retried" : true, + "outputData" : { + "sample_key" : "sample_value" + }, + 
"referenceTaskName" : "sample_referenceTaskName", + "workflowPriority" : 123, + "isolationGroupId" : "sample_isolationGroupId", + "parentTaskId" : "sample_parentTaskId", + "executed" : true, + "subworkflowChanged" : true, + "taskType" : "sample_taskType", + "reasonForIncompletion" : "sample_reasonForIncompletion", + "responseTimeoutSeconds" : 123, + "rateLimitFrequencyInSeconds" : 123, + "iteration" : 123, + "correlationId" : "sample_correlationId", + "startTime" : 123, + "callbackFromWorker" : true, + "workflowType" : "sample_workflowType", + "workflowInstanceId" : "sample_workflowInstanceId", + "seq" : 123, + "startDelayInSeconds" : 123, + "inputData" : { + "sample_key" : "sample_value" + }, + "callbackAfterSeconds" : 123, + "workerId" : "sample_workerId", + "scheduledTime" : 123, + "externalInputPayloadStoragePath" : "sample_externalInputPayloadStoragePath", + "retryCount" : 123, + "workflowTask" : "${WorkflowTask}", + "retriedTaskId" : "sample_retriedTaskId", + "updateTime" : 123, + "pollCount" : 123, + "rateLimitPerFrequency" : 123, + "subWorkflowId" : "sample_subWorkflowId", + "domain" : "sample_domain", + "taskDefName" : "sample_taskDefName", + "endTime" : 123, + "executionNameSpace" : "sample_executionNameSpace", + "firstStartTime" : 123, + "taskId" : "sample_taskId", + "status" : "${Task.Status}", + "externalOutputPayloadStoragePath" : "sample_externalOutputPayloadStoragePath" + }, + "dependencies" : [ "Task.Status", "WorkflowTask" ] + }, + "AuthorizationRequest" : { + "content" : { + "access" : [ "${Access}" ], + "subject" : "${SubjectRef}", + "target" : "${TargetRef}" + }, + "dependencies" : [ "SubjectRef", "TargetRef", "Access" ] + }, + "EventExecution" : { + "content" : { + "output" : { + "sample_key" : "sample_value" + }, + "created" : 123, + "name" : "sample_name", + "messageId" : "sample_messageId", + "action" : "${EventHandler.Action.Type}", + "id" : "sample_id", + "event" : "sample_event", + "status" : "${EventExecution.Status}" + }, + 
"dependencies" : [ "EventExecution.Status", "EventHandler.Action.Type" ] + }, + "ConductorApplication" : { + "content" : { + "updatedBy" : "sample_updatedBy", + "createdBy" : "sample_createdBy", + "createTime" : 123, + "name" : "sample_name", + "updateTime" : 123, + "id" : "sample_id" + }, + "dependencies" : [ ] + }, + "DynamicForkJoinTask" : { + "content" : { + "input" : { + "sample_key" : "sample_value" + }, + "taskName" : "sample_taskName", + "workflowName" : "sample_workflowName", + "type" : "SIMPLE", + "referenceName" : "sample_referenceName" + }, + "dependencies" : [ ] + }, + "EventHandler.TerminateWorkflow" : { + "content" : { + "workflowId" : "sample_workflowId", + "terminationReason" : "sample_terminationReason" + }, + "dependencies" : [ ] + }, + "WorkflowStateUpdate" : { + "content" : { + "variables" : { + "sample_key" : "sample_value" + }, + "taskReferenceName" : "sample_taskReferenceName", + "taskResult" : "${TaskResult}" + }, + "dependencies" : [ "TaskResult" ] + }, + "TaskDef.RetryLogic" : { + "content" : "FIXED", + "enums" :{ + "values" : [ "FIXED", "EXPONENTIAL_BACKOFF", "LINEAR_BACKOFF" ], + "constants" : { + "EXPONENTIAL_BACKOFF" : "(1)", + "FIXED" : "(0)", + "LINEAR_BACKOFF" : "(2)" + }, + "sampleValue" : "FIXED" + }, + "dependencies" : [ ] + }, + "ChatMessage" : { + "content" : { + "role" : "sample_role", + "message" : "sample_message" + }, + "dependencies" : [ ] + }, + "IndexDocInput" : { + "content" : { + "metadata" : { + "sample_key" : "sample_value" + }, + "chunkSize" : 123, + "chunkOverlap" : 123, + "llmProvider" : "sample_llmProvider", + "docId" : "sample_docId", + "vectorDB" : "sample_vectorDB", + "index" : "sample_index", + "mediaType" : "sample_mediaType", + "url" : "sample_url", + "namespace" : "sample_namespace", + "embeddingModelProvider" : "sample_embeddingModelProvider", + "model" : "sample_model", + "text" : "sample_text", + "embeddingModel" : "sample_embeddingModel", + "dimensions" : 123 + }, + "dependencies" : [ ] + }, + 
"SkipTaskRequest" : { + "content" : { + "taskInput" : { + "sample_key" : "sample_value" + }, + "taskOutput" : { + "sample_key" : "sample_value" + } + }, + "dependencies" : [ ] + }, + "PollData" : { + "content" : { + "workerId" : "sample_workerId", + "lastPollTime" : 123, + "queueName" : "sample_queueName", + "domain" : "sample_domain" + }, + "dependencies" : [ ] + }, + "ChatMessage.Actor" : { + "content" : { + "values" : [ "user", "assistant", "system", "human", "chatbot" ], + "constants" : { + "chatbot" : "(4)", + "system" : "(2)", + "assistant" : "(1)", + "user" : "(0)", + "human" : "(3)" + }, + "sampleValue" : "user" + }, + "dependencies" : [ ] + }, + "WorkflowScheduleExecutionModel.State" : { + "content" : { + "values" : [ "POLLED", "FAILED", "EXECUTED" ], + "constants" : { + "FAILED" : "(1)", + "EXECUTED" : "(2)", + "POLLED" : "(0)" + }, + "sampleValue" : "POLLED" + }, + "dependencies" : [ ] + }, + "IntegrationApi" : { + "content" : { + "integrationName" : "sample_integrationName", + "configuration" : { + "${ConfigKey}" : "sample_Object" + }, + "description" : "sample_description", + "api" : "sample_api", + "enabled" : true, + "tags" : [ "${Tag}" ] + }, + "dependencies" : [ "ConfigKey", "Tag" ] + }, + "UpsertUserRequest" : { + "content" : { + "roles" : [ "USER" ], + "name" : "sample_name", + "groups" : [ "sample_groups" ] + }, + "dependencies" : [ ] + }, + "Role" : { + "content" : { + "permissions" : [ "${Permission}" ], + "name" : "sample_name" + }, + "dependencies" : [ "Permission" ] + }, + "ChatCompletion" : { + "content" : { + "instructions" : "sample_instructions", + "jsonOutput" : true, + "messages" : [ "${ChatMessage}" ] + }, + "dependencies" : [ "ChatMessage" ], + "inherits" : ["LLMWorkerInput"] + }, + "TokenResponse" : { + "content" : { + "token" : "sample_token" + }, + "dependencies" : [ ] + }, + "TaskDef" : { + "content" : { + "timeoutPolicy" : "${TaskDef.TimeoutPolicy}", + "inputKeys" : [ "sample_inputKeys" ], + "concurrentExecLimit" : 123, + 
"isolationGroupId" : "sample_isolationGroupId", + "retryCount" : 123, + "description" : "sample_description", + "inputTemplate" : { + "sample_key" : "sample_value" + }, + "ownerEmail" : "sample_ownerEmail", + "baseType" : "sample_baseType", + "totalTimeoutSeconds" : 123, + "retryDelaySeconds" : 123, + "backoffScaleFactor" : 123, + "rateLimitPerFrequency" : 123, + "retryLogic" : "${TaskDef.RetryLogic}", + "responseTimeoutSeconds" : 123, + "name" : "sample_name", + "timeoutSeconds" : 123, + "rateLimitFrequencyInSeconds" : 123, + "outputKeys" : [ "sample_outputKeys" ], + "executionNameSpace" : "sample_executionNameSpace", + "pollTimeoutSeconds" : 123, + "inputSchema" : "${SchemaDef}", + "outputSchema" : "${SchemaDef}", + "enforceSchema" : true + }, + "dependencies" : [ "TaskDef.TimeoutPolicy", "TaskDef.RetryLogic", "SchemaDef" ], + "inherits" : ["Auditable"] + }, + "WorkflowDefSummary" : { + "content" : { + "createTime" : 123, + "name" : "sample_name", + "version" : 123 + }, + "dependencies" : [ ] + }, + "EventHandler.Action" : { + "content" : { + "terminate_workflow" : "${EventHandler.TerminateWorkflow}", + "fail_task" : "${EventHandler.TaskDetails}", + "expandInlineJSON" : true, + "action" : "start_workflow", + "start_workflow" : "${EventHandler.StartWorkflow}", + "complete_task" : "${EventHandler.TaskDetails}", + "update_workflow_variables" : "${EventHandler.UpdateWorkflowVariables}" + }, + "dependencies" : [ "EventHandler.Action.Type", "EventHandler.StartWorkflow", "EventHandler.TaskDetails", "EventHandler.TerminateWorkflow", "EventHandler.UpdateWorkflowVariables" ] + }, + "EventExecution.Status" : { + "content" : "IN_PROGRESS", + "enums" : { + "values" : [ "IN_PROGRESS", "COMPLETED", "FAILED", "SKIPPED" ], + "constants" : { + "IN_PROGRESS" : "(0)", + "COMPLETED" : "(1)", + "FAILED" : "(2)", + "SKIPPED" : "(3)" + }, + "sampleValue" : "IN_PROGRESS" + }, + "dependencies" : [ ] + }, + "UpsertGroupRequest" : { + "content" : { + "roles" : [ "USER" ], + "defaultAccess" 
: { + "${ResourceType}" : [ "${Access}" ] + }, + "description" : "sample_description" + }, + "dependencies" : [ "ResourceType", "Access" ] + }, + "ConductorUser" : { + "content" : { + "encryptedIdDisplayValue" : "sample_encryptedIdDisplayValue", + "roles" : [ "${Role}" ], + "encryptedId" : true, + "name" : "sample_name", + "groups" : [ "${Group}" ], + "id" : "sample_id", + "uuid" : "sample_uuid" + }, + "dependencies" : [ "Role", "Group" ] + }, + "EventHandler.UpdateWorkflowVariables" : { + "content" : { + "variables" : { + "key" : "sample_value" + }, + "appendArray" : true, + "workflowId" : "sample_workflowId" + }, + "dependencies" : [ ] + }, + "Category" : { + "content" : "API", + "enums" : { + "values" : [ "API", "AI_MODEL", "VECTOR_DB", "RELATIONAL_DB", "MESSAGE_BROKER", "GIT", "EMAIL" ], + "constants" : { + "AI_MODEL" : "(1)", + "GIT" : "(5)", + "API" : "(0)", + "RELATIONAL_DB" : "(3)", + "VECTOR_DB" : "(2)", + "EMAIL" : "(6)", + "MESSAGE_BROKER" : "(4)" + }, + "sampleValue" : "API" + }, + "dependencies" : [ ] + }, + "TaskExecLog" : { + "content" : { + "log" : "sample_log", + "createdTime" : 123, + "taskId" : "sample_taskId" + }, + "dependencies" : [ ] + }, + "WorkflowTask" : { + "content" : { + "joinOn" : [ "sample_joinOn" ], + "description" : "sample_description", + "scriptExpression" : "sample_scriptExpression", + "cacheConfig" : "${WorkflowTask.CacheConfig}", + "type" : "sample_type", + "inputParameters" : { + "sample_key" : "sample_value" + }, + "decisionCases" : { + "sample_key" : [ "${WorkflowTask1}" ] + }, + "loopOver" : [ "${WorkflowTask1}" ], + "caseExpression" : "sample_caseExpression", + "defaultExclusiveJoinTask" : [ "sample_defaultExclusiveJoinTask" ], + "taskDefinition" : "${TaskDef}", + "caseValueParam" : "sample_caseValueParam", + "dynamicForkTasksInputParamName" : "sample_dynamicForkTasksInputParamName", + "expression" : "sample_expression", + "loopCondition" : "sample_loopCondition", + "asyncComplete" : true, + "sink" : "sample_sink", + 
"rateLimited" : true, + "retryCount" : 123, + "subWorkflowParam" : "${SubWorkflowParams1}", + "optional" : true, + "joinStatus" : "sample_joinStatus", + "evaluatorType" : "sample_evaluatorType", + "dynamicTaskNameParam" : "sample_dynamicTaskNameParam", + "name" : "sample_name", + "startDelay" : 123, + "permissive" : true, + "taskReferenceName" : "sample_taskReferenceName", + "defaultCase" : [ "${WorkflowTask1}" ], + "forkTasks" : [ [ "${WorkflowTask1}" ] ], + "dynamicForkTasksParam" : "sample_dynamicForkTasksParam", + "onStateChange" : { + "sample_key" : [ "${StateChangeEvent}" ] + }, + "dynamicForkJoinTasksParam" : "sample_dynamicForkJoinTasksParam" + }, + "dependencies" : [ "SubWorkflowParams", "TaskDef", "WorkflowTask.CacheConfig", "StateChangeEvent" ] + }, + "WorkflowTask1" : { + "content" : { + "joinOn" : [ "sample_joinOn" ], + "description" : "sample_description", + "scriptExpression" : "sample_scriptExpression", + "cacheConfig" : "${WorkflowTask.CacheConfig}", + "type" : "sample_type", + "inputParameters" : { + "sample_key" : "sample_value" + }, + "decisionCases" : { }, + "loopOver" : [ ], + "caseExpression" : "sample_caseExpression", + "defaultExclusiveJoinTask" : [ "sample_defaultExclusiveJoinTask" ], + "taskDefinition" : "${TaskDef}", + "caseValueParam" : "sample_caseValueParam", + "dynamicForkTasksInputParamName" : "sample_dynamicForkTasksInputParamName", + "expression" : "sample_expression", + "loopCondition" : "sample_loopCondition", + "asyncComplete" : true, + "sink" : "sample_sink", + "rateLimited" : true, + "retryCount" : 123, + "subWorkflowParam" : null, + "optional" : true, + "joinStatus" : "sample_joinStatus", + "evaluatorType" : "sample_evaluatorType", + "dynamicTaskNameParam" : "sample_dynamicTaskNameParam", + "name" : "sample_name", + "startDelay" : 123, + "permissive" : true, + "taskReferenceName" : "sample_taskReferenceName", + "defaultCase" : [ ], + "forkTasks" : [ [ ] ], + "dynamicForkTasksParam" : "sample_dynamicForkTasksParam", + 
"onStateChange" : { + "sample_key" : [ "${StateChangeEvent}" ] + }, + "dynamicForkJoinTasksParam" : "sample_dynamicForkJoinTasksParam" + }, + "dependencies" : [ "SubWorkflowParams", "TaskDef", "WorkflowTask.CacheConfig" ] + }, + "StoreEmbeddingsInput" : { + "content" : { + "embeddings" : [ 3.14 ], + "metadata" : { + "sample_key" : "sample_value" + }, + "vectorDB" : "sample_vectorDB", + "namespace" : "sample_namespace", + "index" : "sample_index", + "id" : "sample_id" + }, + "dependencies" : [ ], + "inherits" : ["LLMWorkerInput"] + }, + "StartWorkflowRequest" : { + "content" : { + "input" : { + "sample_key" : "sample_value" + }, + "externalInputPayloadStoragePath" : "sample_externalInputPayloadStoragePath", + "createdBy" : "sample_createdBy", + "name" : "sample_name", + "correlationId" : "sample_correlationId", + "taskToDomain" : { + "sample_key" : "sample_value" + }, + "priority" : 123, + "version" : 123, + "workflowDef" : "${WorkflowDef}", + "idempotencyKey" : "sample_idempotencyKey", + "idempotencyStrategy" : "${IdempotencyStrategy}" + }, + "dependencies" : [ "WorkflowDef", "IdempotencyStrategy" ] + }, + "AccessKeyStatus" : { + "content" : "ACTIVE", + "enums" : { + "values" : [ "ACTIVE", "INACTIVE" ], + "constants" : { + "ACTIVE" : "(0)", + "INACTIVE" : "(1)" + }, + "sampleValue" : "ACTIVE" + }, + "dependencies" : [ ] + }, + "SchemaDef" : { + "content" : { + "data" : { + "sample_key" : "sample_value" + }, + "name" : "sample_name", + "type" : "${SchemaDef.Type}", + "version" : 1, + "externalRef" : "sample_externalRef" + }, + "dependencies" : [ "SchemaDef.Type" ], + "inherits" : ["Auditable"] + }, + "WorkflowTask.CacheConfig" : { + "content" : { + "key" : "sample_key", + "ttlInSecond" : 123 + }, + "dependencies" : [ ] + }, + "EventHandler.StartWorkflow" : { + "content" : { + "input" : { + "key" : "sample_value" + }, + "name" : "sample_name", + "correlationId" : "sample_correlationId", + "taskToDomain" : { + "key" : "sample_value" + }, + "version" : 123 + }, + 
"dependencies" : [ ] + }, + "IntegrationDef" : { + "content" : { + "iconName" : "sample_iconName", + "configuration" : [ "${IntegrationDefFormField}" ], + "name" : "sample_name", + "categoryLabel" : "sample_categoryLabel", + "description" : "sample_description", + "type" : "sample_type", + "category" : "${Category}", + "enabled" : true, + "tags" : [ "sample_tags" ] + }, + "dependencies" : [ "Category", "IntegrationDefFormField" ] + }, + "EventHandler.Action.Type" : { + "content" : "start_workflow", + "enums" : { + "values" : [ "start_workflow", "complete_task", "fail_task", "terminate_workflow", "update_workflow_variables" ], + "constants" : { + "terminate_workflow" : "(3)", + "fail_task" : "(2)", + "start_workflow" : "(0)", + "complete_task" : "(1)", + "update_workflow_variables" : "(4)" + }, + "sampleValue" : "start_workflow" + }, + "dependencies" : [ ] + }, + "Integration" : { + "content" : { + "apis" : [ "${IntegrationApi}" ], + "configuration" : { + "${ConfigKey}" : "sample_Object" + }, + "name" : "sample_name", + "description" : "sample_description", + "modelsCount" : 123, + "type" : "sample_type", + "category" : "${Category}", + "enabled" : true, + "tags" : [ "${Tag}" ] + }, + "dependencies" : [ "Category", "ConfigKey", "Tag", "IntegrationApi", "Auditable" ] + }, + "ConfigKey" : { + "content" : "api_key", + "enums" : { + "values" : [ "api_key", "user", "endpoint", "authUrl", "environment", "projectName", "indexName", "publisher", "password", "namespace", "batchSize", "batchWaitTime", "visibilityTimeout", "connectionType", "consumer", "stream", "batchPollConsumersCount", "consumer_type", "region", "awsAccountId", "externalId", "roleArn", "protocol", "mechanism", "port", "schemaRegistryUrl", "schemaRegistryApiKey", "schemaRegistryApiSecret", "authenticationType", "truststoreAuthenticationType", "tls", "cipherSuite", "pubSubMethod", "keyStorePassword", "keyStoreLocation", "schemaRegistryAuthType", "valueSubjectNameStrategy", "datasourceURL", "jdbcDriver", 
"subscription", "serviceAccountCredentials", "file", "tlsFile", "queueManager", "groupId", "channel", "dimensions", "distance_metric", "indexing_method", "inverted_list_count" ], + "constants" : { + "awsAccountId" : "(PASSWORD)", + "schemaRegistryAuthType" : "(TEXT)", + "tlsFile" : "(FILE)", + "pubSubMethod" : "(TEXT)", + "groupId" : "(TEXT)", + "channel" : "(TEXT)", + "consumer_type" : "(TEXT)", + "valueSubjectNameStrategy" : "(TEXT)", + "cipherSuite" : "(TEXT)", + "serviceAccountCredentials" : "(TEXT)", + "connectionType" : "(TEXT)", + "password" : "(PASSWORD)", + "protocol" : "(TEXT)", + "schemaRegistryApiSecret" : "(PASSWORD)", + "roleArn" : "(TEXT)", + "indexName" : "(TEXT)", + "indexing_method" : "(TEXT)", + "port" : "(TEXT)", + "publisher" : "(TEXT)", + "projectName" : "(TEXT)", + "region" : "(TEXT)", + "keyStoreLocation" : "(TEXT)", + "subscription" : "(TEXT)", + "visibilityTimeout" : "(TEXT)", + "truststoreAuthenticationType" : "(TEXT)", + "authUrl" : "(TEXT)", + "endpoint" : "(TEXT)", + "file" : "(FILE)", + "queueManager" : "(TEXT)", + "stream" : "(TEXT)", + "mechanism" : "(TEXT)", + "datasourceURL" : "(TEXT)", + "consumer" : "(TEXT)", + "schemaRegistryUrl" : "(TEXT)", + "jdbcDriver" : "(TEXT)", + "keyStorePassword" : "(PASSWORD)", + "inverted_list_count" : "(TEXT)", + "externalId" : "(PASSWORD)", + "environment" : "(TEXT)", + "api_key" : "(PASSWORD)", + "batchPollConsumersCount" : "(TEXT)", + "namespace" : "(TEXT)", + "batchWaitTime" : "(TEXT)", + "tls" : "(TEXT)", + "authenticationType" : "(TEXT)", + "distance_metric" : "(TEXT)", + "batchSize" : "(TEXT)", + "schemaRegistryApiKey" : "(TEXT)", + "user" : "(TEXT)", + "dimensions" : "(TEXT)" + }, + "sampleValue" : "api_key" + }, + "dependencies" : [ ] + }, + "TextCompletion" : { + "content" : { }, + "dependencies" : [ ], + "inherits" : ["LLMWorkerInput"] + }, + "PromptTemplateTestRequest" : { + "content" : { + "promptVariables" : { + "key" : "sample_value" + }, + "stopWords" : [ "sample_stopWords" ], + 
"llmProvider" : "sample_llmProvider", + "temperature" : 123.456, + "model" : "sample_model", + "prompt" : "sample_prompt", + "topP" : 123.456 + }, + "dependencies" : [ ] + }, + "BulkResponse" : { + "content" : { + "bulkErrorResults" : { + "sample_key" : "sample_value" + }, + "bulkSuccessfulResults" : [ "${T}" ], + "message" : "sample_message" + }, + "dependencies" : [ "T" ] + }, + "VectorDBInput" : { + "content" : { + "embeddings" : [ 3.14 ], + "metadata" : { + "sample_key" : "sample_value" + }, + "vectorDB" : "sample_vectorDB", + "query" : "sample_query", + "namespace" : "sample_namespace", + "index" : "sample_index", + "dimensions" : 123 + }, + "dependencies" : [ ], + "inherits" : ["LLMWorkerInput"] + }, + "AccessKeyResponse" : { + "content" : { + "createdAt" : 123, + "id" : "sample_id", + "status" : "${AccessKeyStatus}" + }, + "dependencies" : [ "AccessKeyStatus" ] + }, + "Subject" : { + "content" : { + "id" : "sample_id", + "type" : "${SubjectType}" + }, + "dependencies" : [ "SubjectType" ] + }, + "EmbeddingRequest" : { + "content" : { + "llmProvider" : "sample_llmProvider", + "model" : "sample_model", + "text" : "sample_text", + "dimensions" : 123 + }, + "dependencies" : [ ] + }, + "WorkflowScheduleExecutionModel" : { + "content" : { + "scheduleName" : "sample_scheduleName", + "executionTime" : 123, + "executionId" : "sample_executionId", + "reason" : "sample_reason", + "scheduledTime" : 123, + "startWorkflowRequest" : "${StartWorkflowRequest}", + "zoneId" : "sample_zoneId", + "workflowName" : "sample_workflowName", + "stackTrace" : "sample_stackTrace", + "state" : "POLLED", + "workflowId" : "sample_workflowId", + "orgId" : "sample_orgId" + }, + "dependencies" : [ "StartWorkflowRequest" ] + }, + "LLMResponse" : { + "content" : { + "result" : "sample_result", + "finishReason" : "sample_finishReason", + "tokenUsed" : 123 + }, + "dependencies" : [ ] + }, + "TargetRef" : { + "content" : { + "id" : "sample_id", + "type" : "${ResourceType}" + }, + "dependencies" : [ 
"ResourceType" ] + }, + "UpgradeWorkflowRequest" : { + "content" : { + "workflowInput" : { + "sample_key" : "sample_value" + }, + "name" : "sample_name", + "taskOutput" : { + "sample_key" : "sample_value" + }, + "version" : 123 + }, + "dependencies" : [ ] + }, + "RateLimitConfig" : { + "content" : { + "concurrentExecLimit" : 123, + "rateLimitKey" : "sample_rateLimitKey" + }, + "dependencies" : [ ] + }, + "RerunWorkflowRequest" : { + "content" : { + "workflowInput" : { + "sample_key" : "sample_value" + }, + "reRunFromWorkflowId" : "sample_reRunFromWorkflowId", + "taskInput" : { + "sample_key" : "sample_value" + }, + "correlationId" : "sample_correlationId", + "reRunFromTaskId" : "sample_reRunFromTaskId" + }, + "dependencies" : [ ] + }, + "WorkflowSchedule" : { + "content" : { + "updatedTime" : 123, + "paused" : true, + "updatedBy" : "sample_updatedBy", + "description" : "sample_description", + "pausedReason" : "sample_pausedReason", + "runCatchupScheduleInstances" : true, + "tags" : [ "${Tag}" ], + "scheduleStartTime" : 123, + "cronExpression" : "sample_cronExpression", + "startWorkflowRequest" : "${StartWorkflowRequest}", + "createTime" : 123, + "createdBy" : "sample_createdBy", + "name" : "sample_name", + "zoneId" : "sample_zoneId", + "scheduleEndTime" : 123 + }, + "dependencies" : [ "Tag", "StartWorkflowRequest" ] + }, + "T": { + "content": { + "value": "sample_string_value" + }, + "dependencies": [] + }, + "IntegrationUpdate": { + "content": { + "type": "sample_type", + "category": "${Category}", + "description": "sample_description", + "configuration": { + "${ConfigKey}": "sample_Object" + }, + "enabled": true + }, + "dependencies": ["Category", "ConfigKey"] + }, + "IntegrationApiUpdate": { + "content": { + "description": "sample_description", + "configuration": { + "${ConfigKey}": "sample_Object" + }, + "enabled": true, + "maxTokens": 123, + "frequency": "${IntegrationApiUpdate.Frequency}" + }, + "dependencies": ["ConfigKey", "IntegrationApiUpdate.Frequency"] 
import json
import os
import copy
from pathlib import Path


class JsonTemplateResolver:
    """Utility class for resolving JSON templates from a predefined resource file."""

    # Parsed "templates" object from the resource file; loaded lazily on first use.
    _templates_root = None
    # Resource file expected to live in the same directory as this module.
    _template_resource_path = "ser_deser_json_string.json"

    @classmethod
    def load_templates(cls):
        """Load the template definitions from the resource file.

        Raises:
            FileNotFoundError: If the resource file is not next to this module.
            ValueError: If the file lacks a top-level 'templates' element.
        """
        current_dir = Path(__file__).parent
        file_path = current_dir / cls._template_resource_path

        if not file_path.exists():
            raise FileNotFoundError(f"Resource not found: {cls._template_resource_path}")

        # The resource is JSON; decode it as UTF-8 explicitly rather than
        # relying on the platform default encoding (which breaks on Windows).
        with open(file_path, 'r', encoding='utf-8') as f:
            root = json.load(f)

        if "templates" not in root:
            raise ValueError("JSON template does not contain 'templates' root element")

        cls._templates_root = root["templates"]

    @classmethod
    def get_json_string(cls, template_name):
        """
        Gets the JSON string for a specified template.

        Args:
            template_name: The name of the template to resolve

        Returns:
            The resolved template as a JSON string
        """
        if cls._templates_root is None:
            cls.load_templates()

        # First fold in inherited fields, then expand "${...}" references
        # in the resulting node before serializing it back to a string.
        resolved_node = cls._resolve_template_with_inheritance(template_name, set())
        cls._resolve_references(resolved_node, set())
        return json.dumps(resolved_node)
parent_node) + + return result_node + + @classmethod + def _merge_nodes(cls, target, source): + """ + Merges fields from the source node into the target node. + Fields in the target node are not overwritten if they already exist. + """ + if isinstance(source, dict): + for field_name, source_value in source.items(): + # Only add the field if it doesn't exist in the target + if field_name not in target: + if isinstance(source_value, dict) and field_name in target and isinstance(target[field_name], dict): + # Recursively merge objects + cls._merge_nodes(target[field_name], source_value) + else: + # Add the field + target[field_name] = copy.deepcopy(source_value) + + @classmethod + def _resolve_references(cls, node, processed_dependencies): + """Resolves references in a JSON node.""" + if isinstance(node, dict): + cls._resolve_object_references(node, processed_dependencies) + elif isinstance(node, list): + cls._resolve_array_references(node, processed_dependencies) + + @classmethod + def _resolve_object_references(cls, obj_node, processed_dependencies): + """Resolves references in an object node.""" + # Collect field names to avoid RuntimeError during iteration + fields_to_process = list(obj_node.keys()) + + for field_name in fields_to_process: + field_value = obj_node[field_name] + + # Check if the field name is a reference that needs to be resolved + if cls._is_reference(field_name): + reference_name = cls._extract_reference_name(field_name) + + # Use a clone of the processed dependencies for each field name + field_dependencies = set(processed_dependencies) + + if reference_name in field_dependencies: + # Circular reference detected + print(f"Warning: Circular reference detected for {reference_name}") + continue + + field_dependencies.add(reference_name) + + # Resolve the template to get the actual key name + resolved_reference = cls._resolve_template_with_inheritance(reference_name, set()) + + # Only apply if the resolved reference is a simple value (string, 
number, etc.) + if not isinstance(resolved_reference, (dict, list)): + resolved_key = str(resolved_reference) + + # Remove the original reference key and add the resolved key with the same value + original_value = obj_node.pop(field_name) + obj_node[resolved_key] = original_value + + # Update the field name for further processing + field_name = resolved_key + field_value = original_value + + # Check if the field value is a string reference + if isinstance(field_value, str): + text_value = field_value + if cls._is_reference(text_value): + reference_name = cls._extract_reference_name(text_value) + + # Use a clone of the processed dependencies for each field + field_dependencies = set(processed_dependencies) + + if reference_name in field_dependencies: + # Circular reference detected + print(f"Warning: Circular reference detected for {reference_name}") + continue + + field_dependencies.add(reference_name) + + # Resolve the template WITH inheritance + resolved_reference = cls._resolve_template_with_inheritance(reference_name, set()) + + # Resolve any references in the resolved template + cls._resolve_references(resolved_reference, field_dependencies) + obj_node[field_name] = resolved_reference + elif isinstance(field_value, (dict, list)): + # Use a clone of processed dependencies for nested structures + cls._resolve_references(field_value, set(processed_dependencies)) + + @classmethod + def _resolve_array_references(cls, array_node, processed_dependencies): + """Resolves references in an array node.""" + for i in range(len(array_node)): + element = array_node[i] + + if isinstance(element, str): + text_value = element + if cls._is_reference(text_value): + reference_name = cls._extract_reference_name(text_value) + + # Clone the dependencies for each array element + element_dependencies = set(processed_dependencies) + + if reference_name in element_dependencies: + # Circular reference detected + print(f"Warning: Circular reference detected for {reference_name}") + 
continue + + element_dependencies.add(reference_name) + + # Resolve the template WITH inheritance + resolved_reference = cls._resolve_template_with_inheritance(reference_name, set()) + + # Resolve any references in the resolved template + cls._resolve_references(resolved_reference, element_dependencies) + array_node[i] = resolved_reference + elif isinstance(element, (dict, list)): + # Recursively process nested objects and arrays + cls._resolve_references(element, set(processed_dependencies)) + + @staticmethod + def _is_reference(value): + """Checks if a string value is a template reference.""" + return isinstance(value, str) and value.startswith("${") and value.endswith("}") + + @staticmethod + def _extract_reference_name(reference): + """Extracts the reference name from a reference string.""" + return reference[2:-1] \ No newline at end of file From def1163c184e504f4c7607ebdfd5d03ca7c120e1 Mon Sep 17 00:00:00 2001 From: harshilraval Date: Tue, 6 May 2025 19:13:37 +0530 Subject: [PATCH 2/2] serdeser tests --- ...pdate_application_request_serdeser_test.py | 45 +++++++++++ .../event_handler_serdeser_test.py | 81 +++++++++++++++++++ 2 files changed, 126 insertions(+) create mode 100644 tests/serdesertest/create_or_update_application_request_serdeser_test.py create mode 100644 tests/serdesertest/event_handler_serdeser_test.py diff --git a/tests/serdesertest/create_or_update_application_request_serdeser_test.py b/tests/serdesertest/create_or_update_application_request_serdeser_test.py new file mode 100644 index 000000000..ce279f124 --- /dev/null +++ b/tests/serdesertest/create_or_update_application_request_serdeser_test.py @@ -0,0 +1,45 @@ +import unittest +import json + +from conductor.client.http.models import CreateOrUpdateApplicationRequest +from serdesertest.util.serdeser_json_resolver_utility import JsonTemplateResolver + + +class TestCreateOrUpdateApplicationRequest(unittest.TestCase): + """Test case for serialization and deserialization of 
import unittest
import json

from conductor.client.http.models import CreateOrUpdateApplicationRequest
from conductor.client.http.models.event_handler import EventHandler
from conductor.client.http.models.action import Action
from serdesertest.util.serdeser_json_resolver_utility import JsonTemplateResolver


class TestCreateOrUpdateApplicationRequest(unittest.TestCase):
    """Round-trip serialization tests for CreateOrUpdateApplicationRequest."""

    def setUp(self):
        """Resolve the server-side JSON template for this model."""
        self.server_json_str = JsonTemplateResolver.get_json_string("CreateOrUpdateApplicationRequest")
        self.server_json = json.loads(self.server_json_str)

    def test_deserialize_serialize(self):
        """Deserialize the template JSON, then serialize back and compare."""
        # Build the model and populate it from the template payload.
        request = CreateOrUpdateApplicationRequest()
        if 'name' in self.server_json:
            request.name = self.server_json['name']

        # The populated field must mirror the template value.
        expected_name = self.server_json.get('name')
        self.assertEqual(request.name, expected_name,
                         f"Field 'name' was not properly deserialized. Expected: {expected_name}, Got: {request.name}")

        # Serialize the model back into a plain dictionary.
        round_tripped = request.to_dict()

        # Field-level check first, for a precise failure message.
        self.assertEqual(round_tripped.get('name'), self.server_json.get('name'),
                         "Field 'name' did not match after serialization")

        # Then the whole-dictionary comparison.
        self.assertEqual(round_tripped, self.server_json,
                         "Serialized JSON doesn't match the original server JSON")


class TestEventHandlerSerDe(unittest.TestCase):
    """Round-trip serialization tests for the EventHandler model."""

    def setUp(self):
        """Resolve the server-side JSON template for EventHandler."""
        self.server_json_str = JsonTemplateResolver.get_json_string("EventHandler")
        self.server_json = json.loads(self.server_json_str)

    def test_deserialize_serialize(self):
        """Deserialize the template JSON into the SDK model and back."""
        # Build Action objects for the actions list; assumes the Action
        # constructor accepts the template fields as keyword arguments —
        # adjust if the Action class definition differs.
        raw_actions = self.server_json.get('actions') or []
        actions = [Action(**entry) for entry in raw_actions]

        # Construct the EventHandler via its keyword constructor; note the
        # camelCase 'evaluatorType' maps to snake_case 'evaluator_type'.
        handler = EventHandler(
            name=self.server_json.get('name'),
            event=self.server_json.get('event'),
            condition=self.server_json.get('condition'),
            actions=actions,
            active=self.server_json.get('active'),
            evaluator_type=self.server_json.get('evaluatorType')
        )

        # Scalar fields must mirror the template values.
        self.assertEqual(handler.name, self.server_json.get('name'))
        self.assertEqual(handler.event, self.server_json.get('event'))
        self.assertEqual(handler.condition, self.server_json.get('condition'))
        self.assertEqual(handler.active, self.server_json.get('active'))
        self.assertEqual(handler.evaluator_type, self.server_json.get('evaluatorType'))

        # The actions list must be present and element-for-element typed.
        self.assertIsNotNone(handler.actions)
        self.assertEqual(len(handler.actions), len(self.server_json.get('actions', [])))
        if self.server_json.get('actions'):
            for item in handler.actions:
                self.assertIsInstance(item, Action)

        # Serialize the model back into a plain dictionary.
        result_json = handler.to_dict()

        # Compare field-by-field against the original template.
        self.assertEqual(result_json.get('name'), self.server_json.get('name'))
        self.assertEqual(result_json.get('event'), self.server_json.get('event'))
        self.assertEqual(result_json.get('condition'), self.server_json.get('condition'))
        self.assertEqual(result_json.get('active'), self.server_json.get('active'))

        # to_dict() may emit either snake_case or camelCase for the
        # evaluator field; accept whichever key is present.
        if 'evaluator_type' in result_json:
            self.assertEqual(result_json.get('evaluator_type'), self.server_json.get('evaluatorType'))
        elif 'evaluatorType' in result_json:
            self.assertEqual(result_json.get('evaluatorType'), self.server_json.get('evaluatorType'))

        # List lengths must survive the round trip; deeper Action
        # validation depends on how Action.to_dict() serializes.
        if self.server_json.get('actions'):
            self.assertEqual(len(result_json.get('actions')), len(self.server_json.get('actions')))


if __name__ == '__main__':
    unittest.main()