diff --git a/changelogs/fragments/523-refactor-lookup-plugins.yml b/changelogs/fragments/523-refactor-lookup-plugins.yml
new file mode 100644
index 00000000..cb6de6dd
--- /dev/null
+++ b/changelogs/fragments/523-refactor-lookup-plugins.yml
@@ -0,0 +1,34 @@
+---
+minor_changes:
+  - >-
+    cluster_moid - Fix bug where lookup would return inconsistent results for objects in nested paths.
+    Fixes issues https://github.com/ansible-collections/vmware.vmware_rest/issues/500 https://github.com/ansible-collections/vmware.vmware_rest/pull/445
+    https://github.com/ansible-collections/vmware.vmware_rest/issues/324 (https://github.com/ansible-collections/vmware.vmware_rest/pull/523)
+  - >-
+    datacenter_moid - Fix bug where lookup would return inconsistent results for objects in nested paths.
+    Fixes issues https://github.com/ansible-collections/vmware.vmware_rest/issues/500 https://github.com/ansible-collections/vmware.vmware_rest/pull/445
+    https://github.com/ansible-collections/vmware.vmware_rest/issues/324 (https://github.com/ansible-collections/vmware.vmware_rest/pull/523)
+  - >-
+    datastore_moid - Fix bug where lookup would return inconsistent results for objects in nested paths.
+    Fixes issues https://github.com/ansible-collections/vmware.vmware_rest/issues/500 https://github.com/ansible-collections/vmware.vmware_rest/pull/445
+    https://github.com/ansible-collections/vmware.vmware_rest/issues/324 (https://github.com/ansible-collections/vmware.vmware_rest/pull/523)
+  - >-
+    folder_moid - Fix bug where lookup would return inconsistent results for objects in nested paths.
+    Fixes issues https://github.com/ansible-collections/vmware.vmware_rest/issues/500 https://github.com/ansible-collections/vmware.vmware_rest/pull/445
+    https://github.com/ansible-collections/vmware.vmware_rest/issues/324 (https://github.com/ansible-collections/vmware.vmware_rest/pull/523)
+  - >-
+    host_moid - Fix bug where lookup would return inconsistent results for objects in nested paths.
+    Fixes issues https://github.com/ansible-collections/vmware.vmware_rest/issues/500 https://github.com/ansible-collections/vmware.vmware_rest/pull/445
+    https://github.com/ansible-collections/vmware.vmware_rest/issues/324 (https://github.com/ansible-collections/vmware.vmware_rest/pull/523)
+  - >-
+    network_moid - Fix bug where lookup would return inconsistent results for objects in nested paths.
+    Fixes issues https://github.com/ansible-collections/vmware.vmware_rest/issues/500 https://github.com/ansible-collections/vmware.vmware_rest/pull/445
+    https://github.com/ansible-collections/vmware.vmware_rest/issues/324 (https://github.com/ansible-collections/vmware.vmware_rest/pull/523)
+  - >-
+    resource_pool_moid - Fix bug where lookup would return inconsistent results for objects in nested paths.
+    Fixes issues https://github.com/ansible-collections/vmware.vmware_rest/issues/500 https://github.com/ansible-collections/vmware.vmware_rest/pull/445
+    https://github.com/ansible-collections/vmware.vmware_rest/issues/324 (https://github.com/ansible-collections/vmware.vmware_rest/pull/523)
+  - >-
+    vm_moid - Fix bug where lookup would return inconsistent results for objects in nested paths.
+    Fixes issues https://github.com/ansible-collections/vmware.vmware_rest/issues/500 https://github.com/ansible-collections/vmware.vmware_rest/pull/445
+    https://github.com/ansible-collections/vmware.vmware_rest/issues/324 (https://github.com/ansible-collections/vmware.vmware_rest/pull/523)
diff --git a/plugins/plugin_utils/lookup.py b/plugins/plugin_utils/lookup.py
index
2f0c3eb0..cc458784 100644 --- a/plugins/plugin_utils/lookup.py +++ b/plugins/plugin_utils/lookup.py @@ -12,109 +12,84 @@ from ansible.errors import AnsibleLookupError from ansible.module_utils._text import to_native -from ansible_collections.cloud.common.plugins.module_utils.turbo.exceptions import ( - EmbeddedModuleFailure, -) from ansible_collections.vmware.vmware_rest.plugins.module_utils.vmware_rest import ( gen_args, open_session, ) -INVENTORY = { +FILTER_MAPPINGS = { "resource_pool": { - "list": { - "query": { - "clusters": "clusters", - "datacenters": "datacenters", - "hosts": "hosts", - "names": "names", - "parent_resource_pools": "parent_resource_pools", - "resource_pools": "resource_pools", - } - } + "parent_resource_pools": "parent_resource_pools", + "resource_pools": "resource_pools", }, "datacenter": { - "list": { - "query": { - "datacenters": "datacenters", - "folders": "folders", - "names": "names", - } - } - }, - "folder": { - "list": { - "query": { - "datacenters": "datacenters", - "folders": "folders", - "names": "names", - "parent_folders": "parent_folders", - "type": "type", - } - } + "parent_folders": "folders", }, + "folder": {}, "cluster": { - "list": { - "query": { - "clusters": "clusters", - "datacenters": "datacenters", - "folders": "folders", - "names": "names", - } - } + "parent_folders": "folders", }, "host": { - "list": { - "query": { - "clusters": "clusters", - "datacenters": "datacenters", - "folders": "folders", - "hosts": "hosts", - "names": "names", - } - } + "parent_folders": "folders", }, "datastore": { - "list": { - "query": { - "datacenters": "datacenters", - "datastores": "datastores", - "folders": "folders", - "names": "names", - "types": "types", - } - } + "parent_folders": "folders", }, "vm": { - "list": { - "query": { - "clusters": "clusters", - "datacenters": "datacenters", - "folders": "folders", - "hosts": "hosts", - "names": "names", - "resource_pools": "resource_pools", - "vms": "vms", - } - } - }, - "network": { - "list": { - "query": { - "datacenters": "datacenters", - "folders": "folders", - "names": "names", - "networks": "networks", - "types": "types", - } - } + "parent_folders": "folders", }, + "network": {"parent_folders": "folders"}, } +class VcenterApi: + def __init__(self, hostname, session): + self.hostname = hostname + self.session = session + + def build_url(self, object_type, filters): + corrected_filters_for_query = self.correct_filter_names(filters, object_type) + object_type = object_type.replace("_", "-") + + return (f"https://{self.hostname}/api/vcenter/{object_type}") + gen_args( + corrected_filters_for_query, corrected_filters_for_query.keys() + ) + + def correct_filter_names(self, filters, object_type): + """ + Objects in vSphere have slightly different filter names. For example, some use 'parent_folders' and some use 'folders'. + Its easier to read the code if we do all of the filter corrections at the end using a map. + Params: + filters: dict, The active filters that should be applied to the REST request + """ + if object_type not in FILTER_MAPPINGS.keys(): + raise AnsibleLookupError( + "object_type must be one of [%s]." 
+ % ", ".join(list(FILTER_MAPPINGS.keys())) + ) + corrected_filters = {} + for filter_key, filter_value in filters.items(): + try: + corrected_filters[FILTER_MAPPINGS[object_type][filter_key]] = ( + filter_value + ) + except KeyError: + corrected_filters[filter_key] = filter_value + + return corrected_filters + + async def fetch_object_with_filters(self, object_type, filters): + _url = self.build_url(object_type, filters) + async with self.session.get(_url) as response: + return await response.json() + + class Lookup: - def __init__(self, options): + def __init__(self, options, session): self._options = options + self.api = VcenterApi(options["vcenter_hostname"], session) + self.active_filters = {} + self.object_type = options["object_type"] @classmethod async def entry_point(cls, terms, options): @@ -123,7 +98,6 @@ async def entry_point(cls, terms, options): "Option _terms is required but no object has been specified" ) session = None - try: session = await open_session( vcenter_hostname=options["vcenter_hostname"], @@ -132,446 +106,186 @@ async def entry_point(cls, terms, options): validate_certs=options.get("vcenter_validate_certs"), log_file=options.get("vcenter_rest_log_file"), ) - except EmbeddedModuleFailure as e: + except Exception as e: raise AnsibleLookupError( - f'Unable to connect to vCenter or ESXi API at {options.get("vcenter_hostname")}: {to_native(e)}' + f'Unable to connect to vCenter or ESXi API at {options["vcenter_hostname"]}: {to_native(e)}' ) - lookup = cls(options) - lookup._options["session"] = session - - task = asyncio.ensure_future(lookup.moid(terms[0])) + lookup = cls(options, session) + lookup._options["_terms"] = terms[0] + task = asyncio.create_task(lookup.search_for_object_moid_top_down()) return await task - async def fetch(self, url): - async with self._options["session"].get(url) as response: - result = await response.json() - return result - - def build_url(self, object_type, params): - try: - _in_query_parameters = INVENTORY[object_type]["list"]["query"].keys() - if object_type == "resource_pool": - object_type = object_type.replace("_", "-") - except KeyError: - raise AnsibleLookupError( - "object_type must be one of [%s]." % ", ".join(list(INVENTORY.keys())) - ) - - return ( - f"https://{self._options['vcenter_hostname']}/api/vcenter/{object_type}" - ) + gen_args(params, _in_query_parameters) - - async def _helper_fetch(self, object_type, filters): - _url = self.build_url(object_type, filters) - return await self.fetch(_url) - - @staticmethod - def ensure_result(result, object_type, object_name=None): - object_name_decoded = None - - if object_name: - object_name_decoded = urllib.parse.unquote(object_name) - - if ( - not result - or object_name_decoded - and object_name_decoded not in result[0].values() - ): - return "" - - def _filter_result(result): - return [obj for obj in result if "%2f" not in obj["name"]] - - result = _filter_result(result) - if result and len(result) > 1: - raise AnsibleLookupError( - "More than one object available: [%s]." - % ", ".join( - list(f"{item['name']} => {item[object_type]}" for item in result) + async def search_for_object_moid_top_down(self): + """ + Searches for the lookup term in VSphere. Uses a top down approach to progress + through the path (for example /datacenter/vm/foo/bar/my-vm) until it reaches the + desired object. This guarantees we find the correct object even if multiple have the + same name, possibly at the cost of performance. 
+ """ + object_path = self._options["_terms"] + return_all_children = object_path.endswith("/") + path_parts = [_part for _part in object_path.split("/") if _part] + + for index, path_part in enumerate(path_parts): + if not self.active_filters.get("datacenters"): + datacenter_moid = await self.get_object_moid_by_name_and_type( + path_part, "datacenter" ) - ) - try: - object_moid = result[0][object_type] - except (TypeError, KeyError, IndexError) as e: - raise AnsibleLookupError(to_native(e)) - return object_moid - - def _init_filter(self): - filters = {} - filters["datacenters"] = self._options["dc_moid"] - return filters - - async def _get_datacenter_moid(self, path): - filters = {} - dc_name = "" - dc_moid = "" - _result = "" - folder_moid = "" - _path = path + if self.object_type == "datacenter" or not datacenter_moid: + return datacenter_moid + self.active_filters["datacenters"] = datacenter_moid + continue + + if index == len(path_parts) - 1: + # were at the end of the object path. Either return the object, or return + # all of the objects it contains (for example, the children inside of a folder) + if return_all_children: + await self.process_intermediate_path_part(path_part) + return await self.get_all_children_in_object() + else: + return await self.get_object_moid_by_name_and_type(path_part) - # Retrieve folders MoID if any - folder_moid, _path = await self._get_folder_moid(path, filters) - - if _path: - dc_name = _path[0] - - filters["names"] = dc_name - filters["folders"] = folder_moid - _result = await self._helper_fetch("datacenter", filters) - dc_moid = self.ensure_result(_result, "datacenter", dc_name) - - return dc_moid, _path - - async def _fetch_result(self, object_path, object_type, filters): - _result = "" - obj_moid = "" - visited = [] - _object_path_list = list(object_path) - - _result = await self.recursive_folder_or_rp_moid_search( - object_path, object_type, filters - ) - if _result: - if object_path: - for obj in _result: - if _object_path_list: - if obj["name"] == _object_path_list[0]: - del _object_path_list[0] - else: - visited.append(obj) - if not visited: - obj_moid = [_result[-1]] else: - obj_moid = _result - return obj_moid, _object_path_list - - async def _get_folder_moid(self, object_path, filters): - object_name = "" - result = "" - _result = "" - _object_path = [] - - if object_path: - _result, _object_path = await self._fetch_result( - object_path, "folder", filters - ) - result = self.ensure_result(_result, "folder") - - if _result and self._options["object_type"] == "folder": - if isinstance(_result, list) and _object_path: - obj_path_set = set(_object_path) - path_set = set([elem["name"] for elem in _result]) - if path_set - obj_path_set and self._options["_terms"][-1] != "/": - return "", _object_path - - if self._options["_terms"][-1] != "/": - object_name = object_path[-1] - - result = self.ensure_result([_result[-1]], "folder", object_name) - - if ( - self._options["_terms"][-1] == "/" - and self._options["object_type"] == "folder" - ): - result = await self.look_inside(result, filters) - - return result, _object_path - - async def _get_host_moid(self, object_path, filters): - host_moid = "" - result = "" - _object_path = [] - - # Host might be inside a cluster - if self._options["object_type"] in ("host", "vm", "network", "datastore"): - filters["names"] = object_path[0] - cluster_moid, _object_path = await self._get_cluster_moid( - object_path, filters + # were in the middle of an object path, lookup the object at this level + # and add it to 
the filters for the next round of searching
+                await self.process_intermediate_path_part(path_part)
+                continue
+
+        raise AnsibleLookupError(
+            "No objects could be found that match the search path %s" % object_path
+        )
+
+    async def process_intermediate_path_part(self, intermediate_object_name):
+        """
+        Finds and returns the MoID for an object in the middle of a search path. Different vSphere objects can be
+        children of other types of vSphere objects.
+          - VMs could be in a resource pool, a host, or a folder
+          - Networks could be in a host, or in a folder
+          - Hosts could be in a cluster, or in a folder
+          - Datastores could be in a host, or in a folder
+          - Resource pools could be in a cluster, or a host
+        We start with the most restrictive searches and progressively expand the search area until something is found.
+        We also update the filters to include the proper object filter for the next round of searches.
+        Params:
+            intermediate_object_name: str, The name of the current object to search for
+        Returns:
+            str or None, a single MoID or None if nothing was found
+        """
+        if self.object_type == "vm":
+            result = await self.get_object_moid_by_name_and_type(
+                intermediate_object_name, "resource_pool"
             )
-            if not cluster_moid:
-                return "", object_path[1:]
-
-            filters = self._init_filter()
-            filters["clusters"] = cluster_moid
-            if _object_path:
-                filters["names"] = _object_path[0]
-            else:
-                filters["names"] = ""
-
-        result = await self._helper_fetch("host", filters)
-        host_moid = self.ensure_result(result, "host", filters["names"])
+            if result:
+                self.set_new_filters_with_datacenter({"resource_pools": result})
+                return result
-        return host_moid, _object_path[1:]
-
-    async def _helper_get_resource_pool_moid(self, object_path, filters):
-        result = ""
-        rp_moid = ""
-        _object_path = []
-
-        # Resource pool might be inside a cluster
-        filters["names"] = object_path[0]
-        cluster_moid, _object_path = await self._get_cluster_moid(object_path, filters)
-        if not cluster_moid:
-            return "", _object_path
-
-        filters = self._init_filter()
-        filters["clusters"] = cluster_moid
+        if self.object_type in ("host", "resource_pool"):
+            result = await self.get_object_moid_by_name_and_type(
+                intermediate_object_name, "cluster"
             )
+            if result:
+                self.set_new_filters_with_datacenter({"clusters": result})
+                return result
-        if _object_path:
-            result, _object_path = await self._fetch_result(
-                _object_path, "resource_pool", filters
+        if self.object_type in ("vm", "network", "datastore", "resource_pool"):
+            result = await self.get_object_moid_by_name_and_type(
+                intermediate_object_name, "host"
             )
-            rp_moid = self.ensure_result(result, "resource_pool")
-
-        if not result and _object_path:
-            filters = self._init_filter()
-            filters["names"] = _object_path[0]
-            filters["clusters"] = cluster_moid
-            # Resource pool might be inside a host
-            host_moid, _object_path = await self._get_host_moid(_object_path, filters)
-            if not host_moid:
-                return "", _object_path
-
-            filters["hosts"] = host_moid
-            if _object_path:
-                filters["names"] = _object_path[0]
-                result, _object_path = await self._fetch_result(
-                    _object_path, "resource_pool", filters
-                )
+            if result:
+                self.set_new_filters_with_datacenter({"hosts": result})
+                return result
-        if result and self._options["object_type"] == "resource_pool":
-            if isinstance(result, list) and _object_path:
-                obj_path_set = set(_object_path)
-                path_set = set([elem["name"] for elem in result])
-                if path_set - obj_path_set and self._options["_terms"][-1] != "/":
-                    return "", _object_path
-
-        if (
-            self._options["_terms"][-1] == "/"
-            and self._options["object_type"] == "resource_pool"
- ): - result = await self.look_inside(rp_moid, filters) - return result, _object_path - - result = self.ensure_result(result, "resource_pool") - - return result, _object_path - - async def look_inside(self, pre_object, filters): - result = "" - _result = "" - filters["names"] = "" - object_type = self._options["object_type"] - - if pre_object: - if (object_type == "resource_pool" and "resgroup" in pre_object) or ( - object_type == "folder" and "group" in pre_object - ): - parent_key = f"parent_{object_type}s" - filters[parent_key] = pre_object - - object_key = f"{object_type}s" - filters[object_key] = "" - _result = await self._helper_fetch(object_type, filters) - result = self.ensure_result(_result, object_type) + # resource pools cant continue past this point + if self.object_type == "resource_pool": + return None + result = await self.get_object_moid_by_name_and_type( + intermediate_object_name, "folder" + ) + self.set_new_filters_with_datacenter({"parent_folders": result}) return result - async def _get_subset_moid(self, object_path, filters): - object_name = "" - result = "" - _result = "" - _object_path = [] - - if not object_path: - if self._options["_terms"][-1] != "/": - return "", object_path + async def get_object_moid_by_name_and_type(self, object_name, _object_type=None): + """ + Returns a single object MoID with a specific type, name, and filter set. If more than one object + is found, and error is thrown. + Params: + object_name: str, the name of the object to search for + _object_type: str, Optional name of the object type to search for. Defaults to the lookup plugin type + Returns: + str, a single MoID + """ + if not _object_type: + _object_type = self.object_type + + if _object_type == "datacenter": + _filters = {"folders": "group-d1"} else: - if self._options["_terms"][-1] != "/": - object_name = object_path[-1] - - filters["names"] = object_name - _result = await self._helper_fetch(self._options["object_type"], filters) - result = self.ensure_result(_result, self._options["object_type"]) - if not result: - filters["names"] = "" - - if self._options["object_type"] == "vm": - # VM might be in a resource pool - result, _object_path = await self._helper_get_resource_pool_moid( - object_path, filters - ) - if result: - return result - - # Object might be inside a host - host_moid, _object_path = await self._get_host_moid(object_path, filters) - if not host_moid: - return "" - - filters["hosts"] = host_moid - filters["folders"] = "" - filters["names"] = object_name - _result = await self._helper_fetch(self._options["object_type"], filters) - result = self.ensure_result(_result, self._options["object_type"]) + _filters = self.active_filters - return result + _filters["names"] = object_name + _result = await self.api.fetch_object_with_filters(_object_type, _filters) - async def _get_cluster_moid(self, object_path, filters): - cluster_moid = "" - result = await self._helper_fetch("cluster", filters) - cluster_moid = self.ensure_result(result, "cluster", filters["names"]) - - return cluster_moid, object_path[1:] - - async def get_all_objects_path_moid(self, object_path, object_type, filters): - # GET MoID of all the objects specified in the path - filters["names"] = list(set(object_path)) - - if self._options["object_type"] == "vm": - filters["type"] = "VIRTUAL_MACHINE" - elif self._options["object_type"] not in ("resource_pool", "cluster", "folder"): - filters["type"] = self._options[ - "object_type" - ].upper() # HOST, DATASTORE, DATACENTER, NETWORK - - return await 
self._helper_fetch(object_type, filters) - - async def recursive_folder_or_rp_moid_search( - self, - object_path, - object_type, - filters, - parent_object=None, - objects_moid=None, - result=None, - ): - if result is None: - # GET MoID of all the objects specified in the path - result = [] - objects_moid = await self.get_all_objects_path_moid( - object_path, object_type, filters - ) - if not objects_moid: - return "" - elif len(objects_moid) == 1: - return objects_moid - - return await self.recursive_folder_or_rp_moid_search( - object_path, - object_type, - filters, - objects_moid=objects_moid, - result=result, - ) - parent_key = f"parent_{object_type}s" - filters[parent_key] = "" - - if objects_moid and object_path and objects_moid[0]["name"] == object_path[0]: - # There exists a folder MoID with this name - filters["names"] = object_path[0] - - if parent_object is not None: - filters[parent_key] = parent_object - tasks = [ - asyncio.ensure_future(self._helper_fetch(object_type, filters)) - for parent_object_info in objects_moid - if parent_object_info["name"] == object_path[0] - ] - _result = [await i for i in tasks] - [ - result.append(elem[0]) - for elem in _result - if elem - if _result and elem[0] not in result - ] - else: - _result = await self._helper_fetch(object_type, filters) - if not _result: - return "" - [result.append(elem) for elem in _result if elem not in result] - # Update parent_object - parent_object = result[0][object_type] - return await self.recursive_folder_or_rp_moid_search( - object_path[1:], - object_type, - filters, - parent_object, - objects_moid[1:], - result, - ) - if not object_path or (objects_moid and len(objects_moid) == 0): - # Return result and what left in the path - if not result: - result = objects_moid - return result - - if result: - # Update parent_object - parent_object = result[-1][object_type] - return await self.recursive_folder_or_rp_moid_search( - object_path[1:], - object_type, - filters, - parent_object, - objects_moid[1:], - result, + object_moid = self.get_single_moid_from_result( + _result, _object_type, object_name ) + return object_moid - async def moid(self, object_path): - folder_moid = "" - result = "" - filters = {} - _path = [] - - if not object_path: - return "" - - # Split object_path for transversal - self._options["_terms"] = object_path - object_type = self._options["object_type"] - path = tuple(filter(None, object_path.split("/"))) - - # Retrieve datacenter MoID - dc_moid, _path = await self._get_datacenter_moid(path) - if object_type == "datacenter" or not dc_moid: - return dc_moid - self._options["dc_moid"] = dc_moid - filters["datacenters"] = self._options["dc_moid"] - - if _path: - _path = _path[1:] - - # Retrieve folders MoID - folder_moid, _path = await self._get_folder_moid(_path, filters) - if object_type == "folder" or not folder_moid: - return folder_moid - filters["folders"] = folder_moid - - if object_type == "cluster": - if object_path[-1] != "/": - # Save object name - filters["names"] = _path[-1] - else: - filters["names"] = "" - result, _obj_path = await self._get_cluster_moid(_path, filters) - - if object_type in ("datastore", "network"): - filters = self._init_filter() - filters["folders"] = folder_moid - result = await self._get_subset_moid(_path, filters) - - if object_type == "resource_pool": - result, _obj_path = await self._helper_get_resource_pool_moid( - _path, filters + @staticmethod + def get_single_moid_from_result(result, object_type, object_name=None): + """ + Parses vSphere returns query 
results as a json list, validates the results and extracts + the correct MoID + Params: + object_type: str, The type of object to search the results for + object_name: str, The name of the object to search the results for + Returns: + str or None, a single MoID or none if nothing was found + """ + if not result or not object_name: + return None + + object_name_decoded = urllib.parse.unquote(object_name) + if object_name_decoded not in result[0].values(): + return None + + results_with_decoded_names = [] + for obj in result: + if "%2f" in obj["name"]: + continue + results_with_decoded_names.append((obj["name"], obj[object_type])) + + if len(results_with_decoded_names) > 1: + raise AnsibleLookupError( + "More than one object found with matching name: [%s]." + % ", ".join( + [f"{item[0]} => {item[1]}" for item in results_with_decoded_names] + ) ) - if object_type == "vm": - result = await self._get_subset_moid(_path, filters) + try: + return results_with_decoded_names[0][1] + except (TypeError, KeyError, IndexError) as e: + raise AnsibleLookupError(to_native(e)) - if object_type == "host": - result, _obj_path = await self._get_host_moid(_path, filters) + async def get_all_children_in_object(self): + results = await self.api.fetch_object_with_filters( + self.object_type, self.active_filters + ) - return result + try: + return [result[self.object_type] for result in results] + except KeyError: + return None + + def set_new_filters_with_datacenter(self, new_filters): + """ + Deletes filter key value pairs from the active filter dict and replaces them with the new filters. + It will leave the datacenter filter since that is always used. + Params: + new_filters: dict, The new filters you want to apply as active + """ + _dc = self.active_filters.get("datacenters") + self.active_filters = new_filters + if _dc: + self.active_filters["datacenters"] = _dc
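Note on the filter handling above: FILTER_MAPPINGS and correct_filter_names only rename the filter keys that differ between vSphere object types (for example "parent_folders" becomes "folders" for most types) and pass every other key through unchanged. The following standalone sketch is not part of the patch; it uses a trimmed-down copy of the mapping, omits the error handling for unknown object types, and the MoID values are invented. It is only meant to show the intended key normalization.

    # Standalone sketch: normalize lookup-style filter names for a given object type.
    # FILTER_MAPPINGS here is a reduced copy of the table in the patch; MoIDs are made up.
    FILTER_MAPPINGS = {
        "vm": {"parent_folders": "folders"},
        "resource_pool": {
            "parent_resource_pools": "parent_resource_pools",
            "resource_pools": "resource_pools",
        },
    }

    def correct_filter_names(filters, object_type):
        # Rename keys that differ for this object type; pass unknown keys through as-is.
        mapping = FILTER_MAPPINGS.get(object_type, {})
        return {mapping.get(key, key): value for key, value in filters.items()}

    if __name__ == "__main__":
        print(correct_filter_names({"parent_folders": "group-v1", "names": "vm-01"}, "vm"))
        # {'folders': 'group-v1', 'names': 'vm-01'}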
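The top-down search is easier to follow with a concrete lookup term. The sketch below is again standalone and hedged: it does not talk to vCenter, and the datacenter, folder, and VM names ("my_dc", "production", "web") are invented. It only mirrors the path semantics of search_for_object_moid_top_down: the first path component is resolved as the datacenter, each intermediate component narrows the active filters, and a trailing "/" asks for every child of the last object instead of a single MoID.

    # Standalone sketch of the path-walk semantics; no vCenter connection is made.
    def describe_search(term):
        return_all_children = term.endswith("/")
        path_parts = [part for part in term.split("/") if part]
        if not path_parts:
            return ["nothing to search for"]
        steps = ["resolve datacenter '%s'" % path_parts[0]]
        for part in path_parts[1:-1]:
            steps.append("resolve '%s' and add its MoID to the active filters" % part)
        if len(path_parts) > 1:
            last = path_parts[-1]
            if return_all_children:
                steps.append("return the MoIDs of every child of '%s'" % last)
            else:
                steps.append("return the MoID of '%s'" % last)
        return steps

    if __name__ == "__main__":
        # e.g. a vm_moid lookup term; 'my_dc', 'production', and 'web' are made-up names.
        for step in describe_search("/my_dc/vm/production/web/"):
            print(step)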