From f3cd4301a72561201e8bdcbbe9b1e67920272eaf Mon Sep 17 00:00:00 2001
From: Durman
Date: Fri, 16 Dec 2022 14:37:05 +0400
Subject: [PATCH 1/3] Blender 3.4 does not allow clearing sockets of built-in nodes.

---
 tests/group_tests.py    | 2 +-
 utils/sv_json_struct.py | 8 +++++---
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/tests/group_tests.py b/tests/group_tests.py
index 1d37078704..9d9a97c533 100644
--- a/tests/group_tests.py
+++ b/tests/group_tests.py
@@ -24,7 +24,7 @@ def test_grouping_nodes(self):
         importer.import_into_tree(new_tree, print_log=False)
         [setattr(n, 'select', False) for n in new_tree.nodes]
         [setattr(new_tree.nodes[n], 'select', True) for n in ['A Number', 'Formula',
-                                                              'Vector polar input', 'Vector polar output']]
+                                                              'Vector Polar Input', 'Vector Polar Output']]
         with self.subTest(msg="Grouping nodes"):
             bpy.ops.node.add_group_tree_from_selected()
         with self.subTest(msg="Ungrouping nodes"):
diff --git a/utils/sv_json_struct.py b/utils/sv_json_struct.py
index 3df84baae3..abb67f4270 100644
--- a/utils/sv_json_struct.py
+++ b/utils/sv_json_struct.py
@@ -648,14 +648,16 @@ def build(self, node, factories: StructFactory, imported_data: OldNewNames):
         # it will cause replacing of all sockets with wrong identifiers in the group node.
         # clearing and adding sockets of Group input and Group output nodes
         # immediately cause their rebuilding by Blender, so JSON file does not save information about their sockets.
-        if node.bl_idname not in {'NodeGroupInput', 'NodeGroupOutput'}:
-            node.inputs.clear()
+        build_in_id_names = {
+            'NodeGroupInput', 'NodeGroupOutput', 'NodeFrame', 'NodeReroute'}
+        if node.bl_idname not in build_in_id_names:
+            node.inputs.clear()
         for sock_identifier, raw_struct in self._struct.get("inputs", dict()).items():
             with self.logger.add_fail("Add in socket",
                                       f"Tree: {node.id_data.name}, Node {node.name}, Sock: {sock_identifier}"):
                 factories.sock(sock_identifier, self.logger, raw_struct).build(node.inputs, factories, imported_data)

-        if node.bl_idname not in {'NodeGroupInput', 'NodeGroupOutput'}:
+        if node.bl_idname not in build_in_id_names:
             node.outputs.clear()
         for sock_identifier, raw_struct in self._struct.get("outputs", dict()).items():
             with self.logger.add_fail("Add out socket",
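
Note: the sv_json_struct.py change above boils down to a guard: socket collections are cleared only on regular nodes, because Blender 3.4 manages the sockets of built-in node types itself and rejects attempts to clear them. A minimal sketch of that guard, with an illustrative helper name that is not part of the patch:

    import bpy

    # Node types whose sockets Blender 3.4 owns and refuses to clear.
    BUILT_IN_ID_NAMES = {'NodeGroupInput', 'NodeGroupOutput', 'NodeFrame', 'NodeReroute'}

    def clear_sockets_if_allowed(node: bpy.types.Node) -> bool:
        """Clear sockets only on nodes that are not built-in; report whether we did."""
        if node.bl_idname in BUILT_IN_ID_NAMES:
            return False  # leave Blender-managed sockets untouched
        node.inputs.clear()
        node.outputs.clear()
        return True
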
From 4d9d0d3586deaa13c634093bf129bbb91afdc543 Mon Sep 17 00:00:00 2001
From: Durman
Date: Mon, 19 Dec 2022 09:23:59 +0400
Subject: [PATCH 2/3] Fix tests and improve logs

---
 old_nodes/__init__.py      | 25 ++++++-----
 tests/json_import_tests.py |  4 +-
 tests/profile_mk3_tests.py |  2 +-
 utils/sv_json_struct.py    | 18 ++++----
 utils/testing.py           | 85 ++++++++++++++++++--------------------
 5 files changed, 67 insertions(+), 67 deletions(-)

diff --git a/old_nodes/__init__.py b/old_nodes/__init__.py
index 36d9f396a5..079a85efa8 100644
--- a/old_nodes/__init__.py
+++ b/old_nodes/__init__.py
@@ -86,17 +86,22 @@ def has_old_nodes(tree) -> bool:

 def register_old(bl_id):
     """Register old node class"""
-    if bl_id in old_bl_idnames:
-        mod = importlib.import_module(".{}".format(old_bl_idnames[bl_id]), __name__)
-        res = inspect.getmembers(mod)
-        for name, cls in res:
-            if inspect.isclass(cls):
-                if issubclass(cls, bpy.types.Node) and cls.bl_idname == bl_id:
-                    if bl_id not in imported_mods:
-                        mod.register()
-                        imported_mods[bl_id] = mod
-    else:
+    if bl_id in imported_mods:
+        return
+    if bl_id not in old_bl_idnames:
         raise LookupError(f"Cannot find {bl_id} among old nodes")
+    mod = importlib.import_module(".{}".format(old_bl_idnames[bl_id]), __name__)
+    res = inspect.getmembers(mod)
+    module_node_bl_idnames = set()  # there can be multiple of them
+    for name, cls in res:
+        if inspect.isclass(cls):
+            if issubclass(cls, bpy.types.Node):
+                module_node_bl_idnames.add(cls.bl_idname)
+    if bl_id not in module_node_bl_idnames:
+        raise LookupError(f"Old_bl_idnames returns something wrong for {bl_id=}")
+    mod.register()
+    for bl_idname in module_node_bl_idnames:
+        imported_mods[bl_idname] = mod


 def register_all():
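
Note: the rewritten register_old() follows a scan-then-cache pattern: import the module once, collect every node class it defines, validate that the requested bl_idname is really among them, then remember the module under all of its bl_idnames so sibling classes are never registered twice. A standalone sketch of that flow (function and variable names are illustrative, not the addon's API):

    import importlib
    import inspect
    import bpy

    _registered = {}  # bl_idname -> module, mirrors imported_mods in the patch

    def register_old_sketch(bl_id, module_name, package):
        if bl_id in _registered:
            return  # the module was already registered under this id
        mod = importlib.import_module(f".{module_name}", package)
        id_names = {cls.bl_idname
                    for _, cls in inspect.getmembers(mod, inspect.isclass)
                    if issubclass(cls, bpy.types.Node)}
        if bl_id not in id_names:
            raise LookupError(f"{module_name} does not define a node with {bl_id=}")
        mod.register()  # registers every node class in the module at once
        for name in id_names:
            _registered[name] = mod  # cache under all ids, not only the requested one
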
diff --git a/tests/json_import_tests.py b/tests/json_import_tests.py
index 9eae4bfa10..309e2137df 100644
--- a/tests/json_import_tests.py
+++ b/tests/json_import_tests.py
@@ -74,7 +74,7 @@ def test_import_examples(self):

         for examples_path, category_name in example_categories_names():

-            info("Opening Dir named: %s", category_name)
+            debug("Opening Dir named: %s", category_name)
             examples_set = examples_path / category_name

             for listed_path in examples_set.iterdir():
@@ -86,7 +86,7 @@ def test_import_examples(self):
                 name = basename(path)

                 if name in UNITTEST_SKIPLIST:
-                    info(f"Skipping test import of: {name} - to permit unit-tests to continue")
+                    info(f"Skipping test import of: {name} - due missing dependency")
                     continue

                 with self.subTest(file=name):
diff --git a/tests/profile_mk3_tests.py b/tests/profile_mk3_tests.py
index 9c684695da..e1e7c96fb0 100644
--- a/tests/profile_mk3_tests.py
+++ b/tests/profile_mk3_tests.py
@@ -114,7 +114,7 @@ def test_import_examples(self):

             with self.subTest(file=name):
                 with open(path, 'r') as f:
-                    info("Checking profile example: %s", name)
+                    debug("Checking profile example: %s", name)
                     profile_text = f.read()
                     with self.assert_logs_no_errors():
                         parse_profile(profile_text)
diff --git a/utils/sv_json_struct.py b/utils/sv_json_struct.py
index abb67f4270..8deab77645 100644
--- a/utils/sv_json_struct.py
+++ b/utils/sv_json_struct.py
@@ -651,18 +651,18 @@ def build(self, node, factories: StructFactory, imported_data: OldNewNames):
         build_in_id_names = {
             'NodeGroupInput', 'NodeGroupOutput', 'NodeFrame', 'NodeReroute'}
         if node.bl_idname not in build_in_id_names:
-            node.inputs.clear()
-        for sock_identifier, raw_struct in self._struct.get("inputs", dict()).items():
-            with self.logger.add_fail("Add in socket",
-                                      f"Tree: {node.id_data.name}, Node {node.name}, Sock: {sock_identifier}"):
-                factories.sock(sock_identifier, self.logger, raw_struct).build(node.inputs, factories, imported_data)
+            node.inputs.clear()
+            for sock_identifier, raw_struct in self._struct.get("inputs", dict()).items():
+                with self.logger.add_fail("Add in socket",
+                                          f"Tree: {node.id_data.name}, Node {node.name}, Sock: {sock_identifier}"):
+                    factories.sock(sock_identifier, self.logger, raw_struct).build(node.inputs, factories, imported_data)

         if node.bl_idname not in build_in_id_names:
             node.outputs.clear()
-        for sock_identifier, raw_struct in self._struct.get("outputs", dict()).items():
-            with self.logger.add_fail("Add out socket",
-                                      f"Tree: {node.id_data.name}, Node {node.name}, Sock: {sock_identifier}"):
-                factories.sock(sock_identifier, self.logger, raw_struct).build(node.outputs, factories, imported_data)
+            for sock_identifier, raw_struct in self._struct.get("outputs", dict()).items():
+                with self.logger.add_fail("Add out socket",
+                                          f"Tree: {node.id_data.name}, Node {node.name}, Sock: {sock_identifier}"):
+                    factories.sock(sock_identifier, self.logger, raw_struct).build(node.outputs, factories, imported_data)

         if hasattr(node, 'load_from_json'):
             with self.logger.add_fail("Setting advance node properties",
diff --git a/utils/testing.py b/utils/testing.py
index 819c24f2a2..2bd7822c2f 100644
--- a/utils/testing.py
+++ b/utils/testing.py
@@ -24,6 +24,9 @@
 from sverchok.utils.modules_inspection import iter_submodule_names
 from sverchok.utils.sv_json_import import JSONImporter

+
+test_logger = logging.getLogger('tests')
+
 try:
     import coverage
     coverage_available = True
@@ -97,7 +100,7 @@ def create_node_tree(name=None, must_not_exist=True):
     if must_not_exist:
         if name in bpy.data.node_groups:
             raise Exception("Will not create tree `{}': it already exists".format(name))
-    debug("Creating tree: %s", name)
+    test_logger.debug("Creating tree: %s", name)
     return bpy.data.node_groups.new(name=name, type="SverchCustomTreeType")

 def get_or_create_node_tree(name=None):
@@ -107,7 +110,7 @@ def get_or_create_node_tree(name=None):
     if name is None:
         name = "TestingTree"
     if name in bpy.data.node_groups:
-        debug("Using existing tree: %s", name)
+        test_logger.debug("Using existing tree: %s", name)
         return bpy.data.node_groups[name]
     else:
         return create_node_tree(name)
@@ -119,7 +122,7 @@ def get_node_tree(name=None):
     if name is None:
         name = "TestingTree"
     if name in bpy.data.node_groups:
-        debug("Using existing tree: %s", name)
+        test_logger.debug("Using existing tree: %s", name)
         return bpy.data.node_groups[name]
     else:
         raise Exception("There is no node tree named `{}'".format(name))
@@ -137,7 +140,7 @@ def remove_node_tree(name=None):
     if len(areas):
         space = areas[0].spaces[0]
         space.node_tree = None

-    debug("Removing tree: %s", name)
+    test_logger.debug("Removing tree: %s", name)
     tree = bpy.data.node_groups[name]
     bpy.data.node_groups.remove(tree)
@@ -160,7 +163,7 @@ def link_node_tree(reference_blend_path, tree_name=None):
     if tree_name in bpy.data.node_groups:
         raise Exception("Tree named `{}' already exists in current scene".format(tree_name))
     with bpy.data.libraries.load(reference_blend_path, link=True) as (data_src, data_dst):
-        info(f"---- Linked node tree: {basename(reference_blend_path)}")
+        test_logger.debug(f"---- Linked node tree: {basename(reference_blend_path)}")
         data_dst.node_groups = [tree_name]
     # right here the update method of the imported tree will be called
     # sverchok does not have a way of preventing this update
@@ -182,7 +185,7 @@ def create_node(node_type, tree_name=None):
     """
     if tree_name is None:
         tree_name = "TestingTree"
-    debug("Creating node of type %s", node_type)
+    test_logger.debug("Creating node of type %s", node_type)
     return bpy.data.node_groups[tree_name].nodes.new(type=node_type)

 def get_node(node_name, tree_name=None):
@@ -203,6 +206,19 @@ def get_tests_path():
     tests_dir = join(dirname(sv_init), "tests")
     return tests_dir

+
+@contextmanager
+def logger_level(logger_name: str, level: str):
+    """Set temporary level to given logger"""
+    logger = logging.getLogger(logger_name)
+    initial_level = logger.level
+    logger.setLevel(level)
+    try:
+        yield None
+    finally:
+        logger.setLevel(initial_level)
+
+
 def run_all_tests(pattern=None):
     """
     Run all existing test cases.
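
Note: the logger_level() context manager added above temporarily overrides a logger's level and restores the previous one even if the wrapped block raises. Illustrative usage, assuming the helper is importable from sverchok.utils.testing:

    import logging
    from sverchok.utils.testing import logger_level

    with logger_level('sverchok', 'ERROR'):
        # while the block runs, anything below ERROR is filtered out
        logging.getLogger('sverchok').warning("this message is suppressed")
    # the original level is restored here, whether or not an exception occurred
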
@@ -212,45 +228,24 @@ def run_all_tests(pattern=None):
     if pattern is None:
         pattern = "*_tests.py"

-    tests_path = get_tests_path()
-    log_handler = logging.FileHandler(join(tests_path, "sverchok_tests.log"), mode='w')
-    logging.getLogger().addHandler(log_handler)
-    try:
-        loader = unittest.TestLoader()
-        suite = loader.discover(start_dir = tests_path, pattern = pattern)
-        buffer = StringIO()
-        runner = unittest.TextTestRunner(stream = buffer, verbosity=2)
-        old_nodes.register_all()
-        with coverage_report():
-            result = runner.run(suite)
-        info("Test cases result:\n%s", buffer.getvalue())
-        return result
-    finally:
-        logging.getLogger().removeHandler(log_handler)
-
-
-def run_test_from_file(file_name):
-    """
-    Run test from file given by name. File should be places in tests folder
-    :param file_name: string like avl_tree_tests.py
-    :return: result
-    """
     tests_path = get_tests_path()
     log_handler = logging.FileHandler(join(tests_path, "sverchok_tests.log"), mode='w')
     logging.getLogger().addHandler(log_handler)
     buffer = None
-    try:
-        loader = unittest.TestLoader()
-        suite = loader.discover(start_dir=tests_path, pattern=file_name)
-        buffer = StringIO()
-        runner = unittest.TextTestRunner(stream=buffer, verbosity=2)
-        old_nodes.register_all()
-        result = runner.run(suite)
-        info("Test cases result:\n%s", buffer.getvalue())
-        return result
-    finally:
-        logging.getLogger().removeHandler(log_handler)
-        return buffer.getvalue().split('\n')[-2] if buffer else "Global error"
+    with logger_level('sverchok', 'ERROR'):
+        try:
+            loader = unittest.TestLoader()
+            suite = loader.discover(start_dir = tests_path, pattern = pattern)
+            buffer = StringIO()
+            runner = unittest.TextTestRunner(stream = buffer, verbosity=2)
+            old_nodes.register_all()
+            with coverage_report():
+                result = runner.run(suite)
+            test_logger.info("Test cases result:\n%s", buffer.getvalue())
+            return result
+        finally:
+            logging.getLogger().removeHandler(log_handler)
+            return buffer.getvalue().split('\n')[-2] if buffer else "Global error"

 """
 using:
@@ -269,7 +264,7 @@ class SverchokTestCase(unittest.TestCase):
     """

     def setUp(self):
-        debug("Starting test: %s", self.__class__.__name__)
+        test_logger.debug("Starting test: %s", self.__class__.__name__)

     @contextmanager
     def temporary_node_tree(self, new_tree_name):
@@ -573,11 +568,11 @@ def emit(self, record):
         logging.getLogger().addHandler(handler)

         try:
-            debug("=== \/ === [%s] Here should be no errors === \/ ===", self.__class__.__name__)
+            test_logger.debug("=== \/ === [%s] Here should be no errors === \/ ===", self.__class__.__name__)
             yield handler
             self.assertFalse(has_errors, "There were some errors logged")
         finally:
-            debug("=== /\ === [%s] There should be no errors === /\ ===", self.__class__.__name__)
+            test_logger.debug("=== /\ === [%s] There should be no errors === /\ ===", self.__class__.__name__)
             logging.getLogger().handlers.remove(handler)

     def subtest_assert_equals(self, value1, value2, message=None):
@@ -835,7 +830,7 @@ def execute(self, context):
             # making self.report after all tests lead to strange error, so no report for testing all
             run_all_tests()
         else:
-            test_result = run_test_from_file(self.test_module + '.py')
+            test_result = run_all_tests(self.test_module + '.py')
             self.report(type={'ERROR'} if test_result != 'OK' else {'INFO'},
                         message=test_result)
         return {'FINISHED'}
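
Note: run_all_tests() above buffers the TextTestRunner output and, when a single module is requested, hands the caller the last line of the report as a summary. The core of that pattern, reduced to the standard library (function name is illustrative):

    import unittest
    from io import StringIO

    def run_suite_summary(start_dir, pattern="*_tests.py"):
        suite = unittest.TestLoader().discover(start_dir=start_dir, pattern=pattern)
        buffer = StringIO()
        result = unittest.TextTestRunner(stream=buffer, verbosity=2).run(suite)
        # the report ends with a newline, so [-2] is its last line: "OK" or "FAILED (...)"
        summary = buffer.getvalue().split('\n')[-2]
        return result, summary
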
From c508aed460af0f2222b48d1484c7d24e2fef1b39 Mon Sep 17 00:00:00 2001
From: Durman
Date: Mon, 26 Dec 2022 11:55:30 +0400
Subject: [PATCH 3/3] fix function name

---
 ui/testing_panel.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ui/testing_panel.py b/ui/testing_panel.py
index 3c35252e67..47e01dfb11 100644
--- a/ui/testing_panel.py
+++ b/ui/testing_panel.py
@@ -37,7 +37,7 @@ def execute(self, context):
             # making self.report after all tests lead to strange error, so no report for testing all
             test.run_all_tests()
         else:
-            test_result = test.run_test_from_file(self.test_module + '.py')
+            test_result = test.run_all_tests(self.test_module + '.py')
             self.report(type={'ERROR'} if test_result != 'OK' else {'INFO'},
                         message=test_result)
         return {'FINISHED'}
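
Note: with run_test_from_file() gone, both panel paths go through the same entry point, since a concrete file name is itself a valid unittest discovery pattern. Illustrative calls, assuming the addon is importable as the sverchok package:

    from sverchok.utils import testing

    testing.run_all_tests()                        # discover every *_tests.py module
    testing.run_all_tests('json_import_tests.py')  # one module: its file name is the pattern
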